All Files ( 67.77% covered at 3.75 hits/line )
257 files in total.
11957 relevant lines,
8103 lines covered and
3854 lines missed.
(
67.77%
)
-
# frozen_string_literal: true

module Hyrax
  module Actors

    # Base class for actors whose only job is to emit log events as an
    # environment passes through the actor stack.
    class AbstractEventActor < Hyrax::Actors::AbstractActor

      # LOG_IGNORE_EVENT = [].freeze

      protected

      # @param attributes [Object] attribute collection pulled from the env
      # @return [Boolean] true when the collection carries no usable values:
      #   either blank, or the single-element [nil] array that Hash#values_at
      #   yields for a missing key
      def attributes_blank?( attributes )
        attributes.blank? || [nil] == attributes
      end

      # Intentionally a no-op: the logging body is commented out but retained
      # for quick re-activation while debugging the actor stack.
      def log_event( env: )
        # # return if LOG_IGNORE_EVENT.include? key
        # actor = next_actor
        # from = caller_locations(1, 2)[1]
        # ::Deepblue::LoggingHelper.bold_debug ["from #{from}",
        #                                       "env.curation_concern.class.name=#{env.curation_concern.class.name}",
        #                                       "env.curation_concern.id=#{env.curation_concern&.id}",
        #                                       "next_actor=#{next_actor.class.name}",
        #                                       "env.attributes=#{env.attributes}" ]
        # rescue Exception => e # rubocop:disable Lint/RescueException
        #   Rails.logger.error "log_event exception - #{e.class}: #{e.message} at #{e.backtrace[0]}"
      end

    end

  end
end
-
# frozen_string_literal: true

module Hyrax
  module Actors

    # Event-logging pass-through positioned after the optimistic-lock
    # validation step: records the event, then delegates to the next actor.
    class AfterOptimisticLockValidator < AbstractEventActor

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if create was successful
      def create( env )
        log_and_forward( env, :create )
      end

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if destroy was successful
      def destroy( env )
        log_and_forward( env, :destroy )
      end

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update( env )
        log_and_forward( env, :update )
      end

      protected

      # Log the event on the environment, then invoke +operation+ on the
      # next actor in the stack.
      def log_and_forward( env, operation )
        env.log_event( next_actor: next_actor )
        next_actor.public_send( operation, env )
      end

      # def log_event( env:, event: )
      #   actor = next_actor
      #   msg = "AfterOptimisticLockValidator.#{event}: env.curation_concern.class=#{env.curation_concern.class.name} next_actor=#{actor.class.name} env.attributes=#{ActiveSupport::JSON.encode env.attributes}"
      #   Deepblue::LoggingHelper.bold_debug( msg, lines: 2 )
      # end

    end

  end
end
-
1
module Hyrax
  module Actors
    # Applies a caller-supplied ordering to a work's ordered members and keeps
    # membership in sync with the submitted :ordered_member_ids attribute.
    class ApplyOrderActor < AbstractActor
      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update(env)
        ::Deepblue::LoggingHelper.bold_debug "ApplyOrderActor.update: next_actor = #{next_actor.class.name}"
        ids = env.attributes.delete(:ordered_member_ids)
        sync_members(env, ids) &&
          apply_order(env.curation_concern, ids) &&
          next_actor.update(env)
      end

      private

      # true only when the current user may edit both the child work and the
      # parent curation concern
      def can_edit_both_works?(env, work)
        ability = env.current_ability
        allowed = ability.can?(:edit, work) && ability.can?(:edit, env.curation_concern)
        # ::Deepblue::LoggingHelper.bold_debug "ApplyOrderActor.update: can_edit_both_works? = #{allowed}"
        allowed
      end

      # Bring membership in line with the submitted id list; a nil list is a
      # no-op. Returns false when any add was rejected (errors were recorded).
      def sync_members(env, ordered_member_ids)
        ::Deepblue::LoggingHelper.bold_debug "ApplyOrderActor.sync_members ordered_member_ids = #{ordered_member_ids}"
        return true if ordered_member_ids.nil?
        cleanup_ids_to_remove_from_curation_concern(env.curation_concern, ordered_member_ids)
        add_new_work_ids_not_already_in_curation_concern(env, ordered_member_ids)
        env.curation_concern.errors[:ordered_member_ids].empty?
      end

      # @todo Why is this not doing work.save?
      # @see Hyrax::Actors::AddToWorkActor for duplication
      def cleanup_ids_to_remove_from_curation_concern(curation_concern, ordered_member_ids)
        ::Deepblue::LoggingHelper.bold_debug "ApplyOrderActor.cleanup_ids_to_remove_from_curation_concern ordered_member_ids = #{ordered_member_ids}"
        stale_ids = curation_concern.ordered_member_ids - ordered_member_ids
        stale_ids.each do |stale_id|
          stale_work = ::ActiveFedora::Base.find(stale_id)
          curation_concern.ordered_members.delete(stale_work)
          curation_concern.members.delete(stale_work)
        end
      end

      # Attach each id not yet a member, provided the user may edit both
      # works; otherwise record an error on the concern.
      def add_new_work_ids_not_already_in_curation_concern(env, ordered_member_ids)
        ::Deepblue::LoggingHelper.bold_debug "ApplyOrderActor.add_new_work_ids_not_already_in_curation_concern ordered_member_ids = #{ordered_member_ids}"
        new_ids = ordered_member_ids - env.curation_concern.ordered_member_ids
        new_ids.each do |new_id|
          candidate = ::ActiveFedora::Base.find(new_id)
          unless can_edit_both_works?(env, candidate)
            env.curation_concern.errors[:ordered_member_ids] << "Works can only be related to each other if user has ability to edit both."
            next
          end
          env.curation_concern.ordered_members << candidate
          env.curation_concern.save!
        end
      end

      # Rewrite the proxy list in place so it reflects new_order, truncating
      # when new_order is shorter than the current proxy list.
      def apply_order(curation_concern, new_order)
        ::Deepblue::LoggingHelper.bold_debug "ApplyOrderActor.apply_order new_order = #{new_order}"
        return true unless new_order
        curation_concern.ordered_member_proxies.each_with_index do |proxy, position|
          replacement_id = new_order[position]
          if replacement_id
            proxy.proxy_for = ActiveFedora::Base.id_to_uri(replacement_id)
            proxy.target = nil
          else
            # new order ran out: splice the tail off the linked proxy list
            proxy.prev.next = curation_concern.ordered_member_proxies.last.next
            break
          end
        end
        curation_concern.list_source.order_will_change!
        true
      end
    end
  end
end
-
1
module Hyrax
  module Actors
    # Attach or remove child works to/from this work. This decodes parameters
    # that follow the rails nested parameters conventions:
    # e.g.
    #   'work_members_attributes' => {
    #     '0' => { 'id' => '12312412'},
    #     '1' => { 'id' => '99981228', '_destroy' => 'true' }
    #   }
    #
    # The goal of this actor is to mutate the ordered_members with as few writes
    # as possible, because changing ordered_members is slow. This class only
    # writes changes, not the full ordered list.
    class AttachMembersActor < Hyrax::Actors::AbstractActor
      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update(env)
        # log_event( env: env )
        attributes_collection = env.attributes.delete(:work_members_attributes)
        ::Deepblue::LoggingHelper.bold_debug "AttachMembersActor.update: next_actor = #{next_actor.class.name}"
        assign_nested_attributes_for_collection(env, attributes_collection) &&
          next_actor.update(env)
      end

      private

      # Attaches any unattached members. Deletes those that are marked _destroy.
      # @param env [Hyrax::Actors::Environment]
      # @param attributes_collection [Hash<Hash>] a collection of members
      def assign_nested_attributes_for_collection(env, attributes_collection)
        return true unless attributes_collection
        attributes_collection = attributes_collection.sort_by { |i, _| i.to_i }.map { |_, attributes| attributes }
        # checking for existing works to avoid rewriting/loading works that are
        # already attached
        existing_works = env.curation_concern.member_ids
        attributes_collection.each do |attributes|
          next if attributes['id'].blank?
          if existing_works.include?(attributes['id'])
            # BUGFIX: pass the acting user through; `remove` previously
            # referenced an undefined `current_user`, raising NameError
            # whenever a member was flagged for _destroy.
            remove(env.curation_concern, attributes['id'], env.user) if has_destroy_flag?(attributes)
          else
            add(env, attributes['id'])
          end
        end
      end

      # NOTE(review): add2/remove2 appear to be unused leftovers of the stock
      # Hyrax implementation (nothing in this class calls them); kept for
      # compatibility, candidates for deletion.
      def add2(env, id)
        member = ActiveFedora::Base.find(id)
        return unless env.current_ability.can?(:edit, member)
        env.curation_concern.ordered_members << member
      end

      def remove2(curation_concern, id)
        member = ActiveFedora::Base.find(id)
        curation_concern.ordered_members.delete(member)
        curation_concern.members.delete(member)
      end

      # Adds the item to the ordered members so that it displays in the items
      # along side the FileSets on the show page, then provenance-logs the add
      # when the concern supports it.
      def add( env, id )
        # ::Deepblue::LoggingHelper.bold_debug "AttachMembersActor.add: id = #{id}"
        return if id.blank?
        member = ActiveFedora::Base.find( id )
        # is this check necessary?
        can_do_it = env.current_ability.can?( :edit, member )
        return unless can_do_it
        env.curation_concern.ordered_members << member

        return unless env.curation_concern.respond_to? :provenance_child_add
        current_user = env.user
        env.curation_concern.provenance_child_add( current_user: current_user,
                                                   child_id: id,
                                                   event_note: "AttachMembersActor" )
      end

      # Remove the object from the members set and the ordered members list,
      # then provenance-log the removal when the concern supports it.
      # @param user [User, nil] the acting user for provenance logging; a
      #   default of nil keeps the old 2-arg call form working.
      def remove( curation_concern, id, user = nil )
        # ::Deepblue::LoggingHelper.bold_debug "AttachMembersActor.remove: id = #{id}"
        return if id.blank?
        member = ActiveFedora::Base.find(id)
        curation_concern.ordered_members.delete(member)
        curation_concern.members.delete(member)
        return unless curation_concern.respond_to? :provenance_child_remove
        curation_concern.provenance_child_remove( current_user: user,
                                                  child_id: id,
                                                  event_note: "AttachMembersActor" )
      end

      # Determines if a hash contains a truthy _destroy key.
      # rubocop:disable Naming/PredicateName
      def has_destroy_flag?(hash)
        ActiveFedora::Type::Boolean.new.cast(hash['_destroy'])
      end
      # rubocop:enable Naming/PredicateName
    end
  end
end
-
# frozen_string_literal: true

module Hyrax
  module Actors

    # see AddToWorkActor
    # Provenance logging for works gaining/losing this work as a child.
    class BeforeAddToWorkActor < AbstractEventActor

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if create was successful
      def create( env )
        env.log_event( next_actor: next_actor )
        work_ids = env.attributes.values_at( :in_works_ids )
        Deepblue::LoggingHelper.bold_debug "BeforeAddToWorkActor.create: next_actor = #{next_actor.class.name}, work_ids=#{work_ids}"
        # NOTE: on create the provenance pass runs AFTER the downstream actor.
        next_actor.create( env ) && add_to_works( env, work_ids )
      end

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update( env )
        env.log_event( next_actor: next_actor )
        work_ids = env.attributes.values_at( :in_works_ids )
        Deepblue::LoggingHelper.bold_debug "BeforeAddToWorkActor.update: next_actor = #{next_actor.class.name}, work_ids=#{work_ids}"
        # NOTE: on update the provenance pass runs BEFORE the downstream actor.
        add_to_works( env, work_ids ) && next_actor.update( env )
      end

      protected

      # Provenance-log a child add on each newly referenced parent work,
      # but only when the depositor may edit both works; otherwise ignore.
      def add_new_work_ids_not_already_in_curation_concern( env, new_work_ids )
        added_ids = new_work_ids - env.curation_concern.in_works_ids
        added_ids.each do |added_id|
          parent = ::ActiveFedora::Base.find( added_id )
          next unless parent.respond_to? :provenance_child_add
          next unless can_edit_both_works?( env, parent )
          parent.provenance_child_add( current_user: env.user, child_id: env.curation_concern.id, event_note: 'BeforeAddToWorkActor' )
        end
      end

      # Drive both the remove and add provenance passes; true when no
      # errors were recorded under :in_works_ids.
      def add_to_works( env, new_work_ids )
        return true if attributes_blank? new_work_ids
        cleanup_ids_to_remove_from_curation_concern( env, new_work_ids )
        add_new_work_ids_not_already_in_curation_concern( env, new_work_ids )
        env.curation_concern.errors[:in_works_ids].empty?
      end

      def can_edit_both_works?( env, work )
        env.current_ability.can?( :edit, work ) && env.current_ability.can?( :edit, env.curation_concern )
      end

      # Provenance-log a child removal on each parent work no longer listed.
      def cleanup_ids_to_remove_from_curation_concern( env, new_work_ids )
        removed_ids = env.curation_concern.in_works_ids - new_work_ids
        removed_ids.each do |removed_id|
          parent = ::ActiveFedora::Base.find( removed_id )
          next unless parent.respond_to? :provenance_child_remove
          parent.provenance_child_remove( current_user: env.user, child_id: env.curation_concern.id, event_note: 'BeforeAddToWorkActor' )
        end
      end

    end

  end
end
-
# frozen_string_literal: true

module Hyrax
  module Actors

    # see AttachMembersActor for original code
    # Provenance logging for:
    #
    # Attach or remove child works to/from this work. This decodes parameters
    # that follow the rails nested parameters conventions:
    # e.g.
    #   'work_members_attributes' => {
    #     '0' => { 'id' => '12312412'},
    #     '1' => { 'id' => '99981228', '_destroy' => 'true' }
    #   }
    class BeforeAttachMembersActor < AbstractEventActor

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update( env )
        env.log_event( next_actor: next_actor )
        attributes_collection = env.attributes.values_at( :work_members_attributes )
        Deepblue::LoggingHelper.bold_debug "BeforeAttachMembersActor.update: next_actor = #{next_actor.class.name}"
        assign_nested_attributes_for_collection( env, attributes_collection ) && next_actor.update( env )
      end

      protected

      # Provenance log for attaching and unattaching members.
      # @param attributes_collection [Hash<Hash>] a collection of members
      def assign_nested_attributes_for_collection( env, attributes_collection )
        return true if attributes_blank? attributes_collection
        return true unless env.curation_concern.respond_to? :provenance_child_add

        # values_at wraps the hash in a one-element array; unwrap it
        attributes_collection = attributes_collection.first if attributes_collection.is_a? Array
        attributes_collection = attributes_collection.sort_by { |i, _| i.to_i }.map { |_, attributes| attributes }
        # checking for existing works to avoid rewriting/loading works that are already attached
        existing_works = env.curation_concern.member_ids
        current_user = env.user
        attributes_collection.each do |attributes|
          member_id = attributes['id']
          next if member_id.blank?
          if existing_works.include?( member_id )
            remove( env, member_id, current_user ) if has_destroy_flag?( attributes )
          else
            add( env, member_id, current_user )
          end
        end
      end

      # provenance log: Adds the item to the ordered members so that it displays in the items
      # along side the FileSets on the show page
      def add( env, id, current_user )
        member = ActiveFedora::Base.find( id )
        return unless env.current_ability.can?( :edit, member )
        env.curation_concern.provenance_child_add( current_user: current_user, child_id: id, event_note: 'BeforeAttachMembersActor' )
      end

      # Determines if a hash contains a truthy _destroy key.
      # rubocop:disable Style/PredicateName
      def has_destroy_flag?( hash )
        ActiveFedora::Type::Boolean.new.cast( hash['_destroy'] )
      end
      # rubocop:enable Style/PredicateName

      # provenance log for: Remove the object from the members set and the ordered members list
      def remove( env, id, current_user )
        env.curation_concern.provenance_child_remove( current_user: current_user, child_id: id, event_note: 'BeforeAttachMembersActor' )
      end

    end

  end
end
-
# frozen_string_literal: true

module Hyrax
  module Actors

    # Event-logging pass-through that runs just before the model actor:
    # records the event, then delegates to the next actor.
    class BeforeModelActor < AbstractEventActor

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if create was successful
      def create( env )
        log_and_forward( env, :create )
      end

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if destroy was successful
      def destroy( env )
        log_and_forward( env, :destroy )
      end

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update( env )
        log_and_forward( env, :update )
      end

      protected

      # Log the event on the environment, then invoke +operation+ on the
      # next actor in the stack.
      def log_and_forward( env, operation )
        env.log_event( next_actor: next_actor )
        next_actor.public_send( operation, env )
      end

      # def log_before_event( env:, event: )
      #   actor = next_actor
      #   Deepblue::LoggingHelper.bold_debug "BeforeModelActor.#{event}: env.curation_concern.class=#{env.curation_concern.class.name} next_actor = #{actor.class.name}"
      # end

      # def model_actor(env)
      #   actor_identifier = env.curation_concern.class
      #   klass = "Hyrax::Actors::#{actor_identifier}Actor".constantize
      #   klass.new(next_actor)
      # end

    end

  end
end
-
# frozen_string_literal: true

module Hyrax

  module Actors

    # Creates a work and attaches files to the work
    class CreateWithFilesActor < Hyrax::Actors::AbstractActor

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if create was successful
      def create(env)
        file_ids = filter_file_ids(env.attributes.delete(:uploaded_files))
        pending_files = uploaded_files(file_ids)
        validate_files(pending_files, env) && next_actor.create(env) && attach_files(pending_files, env)
      end

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update(env)
        file_ids = filter_file_ids(env.attributes.delete(:uploaded_files))
        pending_files = uploaded_files(file_ids)
        validate_files(pending_files, env) && next_actor.update(env) && attach_files(pending_files, env)
      end

      private

      # Normalize nil/scalar/array input into an array of present ids.
      def filter_file_ids(input)
        Array.wrap(input).select(&:present?)
      end

      # ensure that the files we are given are owned by the depositor of the work
      def validate_files(files, env)
        owner_id = env.user.id
        foreign_file = files.find { |file| file.user_id != owner_id }
        return true if foreign_file.nil?
        Rails.logger.error "User #{env.user.user_key} attempted to ingest uploaded_file #{foreign_file.id}, but it belongs to a different user"
        false
      end

      # Enqueue attachment asynchronously; attaching is too slow to do inline.
      # @return [TrueClass]
      def attach_files(files, env)
        return true if files.blank?
        AttachFilesToWorkJob.perform_later( env.curation_concern, files, env.user.user_key, env.attributes.to_h.symbolize_keys )
        true
      end

      # Fetch uploaded_files from the database
      def uploaded_files(uploaded_file_ids)
        return [] if uploaded_file_ids.empty?
        UploadedFile.find(uploaded_file_ids)
      end

    end

  end

end
-
# frozen_string_literal: true

module Hyrax
  module Actors

    # Actor for DataSet works; currently defers entirely to BaseActor.
    class DataSetActor < Hyrax::Actors::BaseActor

      # Cast any singular values from the form to multiple values for persistence
      def clean_attributes(attributes)
        # attributes[:rights_license] = Array(attributes[:rights_license]) if attributes.key? :rights_license
        super
      end

    end

  end
end
-
# Generated via
#  `rails generate hyrax:work Dissertation`
module Hyrax
  module Actors
    # Actor for Dissertation works; inherits all behavior from BaseActor.
    class DissertationActor < Hyrax::Actors::BaseActor
    end
  end
end
-
# Generated via
#  `rails generate hyrax:work GenericWork`
module Hyrax
  module Actors
    # Actor for GenericWork works; inherits all behavior from BaseActor.
    class GenericWorkActor < Hyrax::Actors::BaseActor
    end
  end
end
-
-
1
require File.join(Gem::Specification.find_by_name("hyrax").full_gem_path, "app/actors/hyrax/actors/interpret_visibility_actor.rb")

module Hyrax

  module Actors

    # Monkey patch: unlike stock Hyrax, an embargo_release_date in the past is
    # accepted unless config.embargo_enforce_future_release_date is enabled.
    class InterpretVisibilityActor < AbstractActor

      private

      # When specified, validate embargo complies with AdminSet template
      # requirements (if any); the future-date requirement is applied only
      # when the application config demands it.
      def validate_embargo(env, intention, attributes, template)
        return true unless intention.wants_embargo?

        release_date = parse_date(attributes[:embargo_release_date])

        # Enforce "release date must be in the future" only when configured.
        date_ok =
          if DeepBlueDocs::Application.config.embargo_enforce_future_release_date
            valid_future_date?(env, release_date)
          else
            true
          end

        # When embargo required, date must satisfy any template requirements too.
        template_date_ok = valid_template_embargo_date?(env, release_date, template)
        template_visibility_ok = valid_template_visibility_after_embargo?(env, attributes, template)
        return true if date_ok && template_date_ok && template_visibility_ok

        # Only the missing-date case gets an explicit error message.
        env.curation_concern.errors.add(:visibility, 'When setting visibility to "embargo" you must also specify embargo release date.') if release_date.blank?
        false
      end

    end

  end

end
-
# frozen_string_literal: true

module Hyrax

  module Actors
    # Validates that the submitted version is the most recent version in the datastore.
    # Caveat: we are not detecting if the version is changed by a different process between
    # the time this validator is run and when the object is saved
    class OptimisticLockValidator < Actors::AbstractActor
      class_attribute :version_field
      self.version_field = 'version'

      # @param [Hyrax::Actors::Environment] env
      # @return [Boolean] true if update was successful
      def update(env)
        # log_event( env: env )
        validate_lock(env, version_attribute(env.attributes)) && next_actor.update(env)
      end

      private

      # @return [Boolean] returns true if the lock is missing or
      #         if it matches the current object version.
      def validate_lock(env, version)
        lock_ok = version.blank? || version == env.curation_concern.etag
        env.curation_concern.errors.add(:base, :conflict) unless lock_ok
        lock_ok
      end

      # Removes (and returns) the version attribute so it is not persisted.
      def version_attribute(attributes)
        attributes.delete(version_field)
      end
    end
  end

end
-
# frozen_string_literal: true

class ApplicationController < ActionController::Base
  helper Openseadragon::OpenseadragonHelper
  # Adds a few additional behaviors into the application controller
  include Blacklight::Controller
  skip_after_action :discard_flash_if_xhr
  include Hydra::Controller::ControllerBehavior

  # Behavior for devise. Use remote user field in http header for auth.
  include Devise::Behaviors::HttpHeaderAuthenticatableBehavior

  # Adds Hyrax behaviors into the application controller
  include Hyrax::Controller
  include Hyrax::ThemedLayoutController
  with_themed_layout '1_column'

  protect_from_forgery with: :exception

  around_action :global_request_logging

  # Log every request's origin and method, and (via ensure) the response
  # status even when the action raises.
  def global_request_logging
    logger.info "ACCESS: #{request.remote_ip}, #{request.method} #{request.url}, #{request.headers['HTTP_USER_AGENT']}"
    begin
      yield
    ensure
      logger.info "response_status: #{response.status}"
    end
  end

  # From PSU's ScholarSphere
  # Clears any user session and authorization information by:
  #   * forcing the session to be restarted on every request
  #   * ensuring the user will be logged out if REMOTE_USER is not set
  #   * clearing the entire session including flash messages
  # (Verbose [AUTHN] debug logging removed; see git history if needed.)
  def clear_session_user
    return nil_request if request.nil?
    # Preserve search state and flash messages across the forced logout.
    search = session[:search].dup if session[:search]
    flash = session[:flash].dup if session[:flash]
    request.env['warden'].logout unless user_logged_in?
    session[:search] = search if search
    session[:flash] = flash if flash
  end

  Warden::Manager.after_authentication do |user, auth, _opts|
    Rails.logger.debug "[AUTHN] Warden after_authentication (clearing flash): #{user}"
    auth.request.flash.clear
  end

  rescue_from ActionController::UnknownFormat, with: :rescue_404

  # Render the static 404 page for unknown formats.
  def rescue_404
    render file: Rails.public_path.join('404.html'), status: :not_found, layout: true
  end

  before_action :set_locale

  # Apply a requested locale when present and allowed, falling back to the
  # default; normalize params[:locale] to the locale actually applied.
  def set_locale
    return if params[:locale].blank?
    I18n.locale = constrained_locale || I18n.default_locale
    params[:locale] = I18n.locale.to_s
  end

  # @return [String, nil] params[:locale] when it is an available translation
  def constrained_locale
    return params[:locale] if params[:locale].in?(Object.new.extend(HyraxHelper).available_translations)
  end
end
# frozen_string_literal: true
-
-
1
class CatalogController < ApplicationController
-
1
include Hydra::Catalog
-
1
include Hydra::Controller::ControllerBehavior
-
-
# This filter applies the hydra access controls
-
1
before_action :enforce_show_permissions, only: :show
-
-
1
def self.uploaded_field
-
2
solr_name('system_create', :stored_sortable, type: :date)
-
end
-
-
1
def self.modified_field
-
3
solr_name('system_modified', :stored_sortable, type: :date)
-
end
-
-
1
configure_blacklight do |config|
-
1
config.view.gallery.partials = %i[index_header index]
-
1
config.view.masonry.partials = [:index]
-
1
config.view.slideshow.partials = [:index]
-
-
# set maximum results per page to support bootstrap page sorting
-
# in dashboard.
-
# config.max_per_page = 1000000
-
-
1
config.show.tile_source_field = :content_metadata_image_iiif_info_ssm
-
1
config.show.partials.insert(1, :openseadragon)
-
1
config.search_builder_class = Hyrax::CatalogSearchBuilder
-
-
# Show gallery view
-
1
config.view.gallery.partials = %i[index_header index]
-
1
config.view.slideshow.partials = [:index]
-
-
## Default parameters to send to solr for all search-like requests. See also SolrHelper#solr_search_params
-
1
config.default_solr_params = {
-
qt: "search",
-
rows: 10,
-
# qf: "title_tesim description_tesim creator_tesim keyword_tesim"
-
qf: "title_tesim name_tesim creator_tesim description_tesim grantnumber_tesim methodology_tesim subject_tesim keyword_tesim referenced_by_tesim all_text_timv"
-
}
-
-
# solr field configuration for document/show views
-
1
config.index.title_field = solr_name("title", :stored_searchable)
-
1
config.index.display_type_field = solr_name("has_model", :symbol)
-
1
config.index.thumbnail_field = 'thumbnail_path_ss'
-
-
# solr fields that will be treated as facets by the blacklight application
-
# The ordering of the field names is the order of the display
-
#config.add_facet_field solr_name("human_readable_type", :facetable), label: "Type", limit: 5
-
1
config.add_facet_field solr_name("resource_type", :facetable), label: "Resource Type", limit: 5
-
1
config.add_facet_field solr_name("creator", :facetable), limit: 5
-
#config.add_facet_field solr_name("contributor", :facetable), label: "Contributor", limit: 5
-
#config.add_facet_field solr_name("keyword", :facetable), limit: 5
-
#config.add_facet_field solr_name("subject", :facetable), limit: 5
-
1
config.add_facet_field solr_name("subject_discipline", :facetable), label: "Discipline", limit: 5
-
1
config.add_facet_field solr_name("language", :facetable), limit: 5
-
#config.add_facet_field solr_name("based_near_label", :facetable), limit: 5
-
#config.add_facet_field solr_name("publisher", :facetable), limit: 5
-
#config.add_facet_field solr_name("file_format", :facetable), limit: 5
-
#config.add_facet_field solr_name('member_of_collections', :symbol), limit: 5, label: 'Collections'
-
-
# The generic_type isn't displayed on the facet list
-
# It's used to give a label to the filter that comes from the user profile
-
1
config.add_facet_field solr_name("generic_type", :facetable), if: false
-
-
# Have BL send all facet field names to Solr, which has been the default
-
# previously. Simply remove these lines if you'd rather use Solr request
-
# handler defaults, or have no facets.
-
1
config.add_facet_fields_to_solr_request!
-
-
# solr fields to be displayed in the index (search results) view
-
# The ordering of the field names is the order of the display
-
1
config.add_index_field solr_name("title", :stored_searchable), label: "Title", itemprop: 'name', if: false
-
1
config.add_index_field solr_name("creator", :stored_searchable), itemprop: 'creator', link_to_search: solr_name("creator", :facetable)
-
1
config.add_index_field solr_name("description", :stored_searchable), itemprop: 'description', helper_method: :iconify_auto_link
-
1
config.add_index_field solr_name("keyword", :stored_searchable), itemprop: 'keywords', link_to_search: solr_name("keyword", :facetable)
-
1
config.add_index_field solr_name("referenced_by", :stored_searchable), itemprop: 'referenced_by', label: "Citation to related publication", helper_method: :iconify_auto_link
-
1
config.add_index_field solr_name("subject_discipline", :stored_searchable), itemprop: 'subject_discipline', label: "Discipline", link_to_search: solr_name("subject_discipline", :facetable)
-
-
# This was the default that came with hyrax.
-
#config.add_index_field solr_name("title", :stored_searchable), label: "Title", itemprop: 'name', if: false
-
#config.add_index_field solr_name("description", :stored_searchable), itemprop: 'description', helper_method: :iconify_auto_link
-
#config.add_index_field solr_name("keyword", :stored_searchable), itemprop: 'keywords', link_to_search: solr_name("keyword", :facetable)
-
#config.add_index_field solr_name("subject", :stored_searchable), itemprop: 'about', link_to_search: solr_name("subject", :facetable)
-
#config.add_index_field solr_name("subject_discipline", :stored_searchable), label: "Discipline", link_to_search: solr_name("subject_discipline", :facetable)
-
#config.add_index_field solr_name("creator", :stored_searchable), itemprop: 'creator', link_to_search: solr_name("creator", :facetable)
-
#config.add_index_field solr_name("contributor", :stored_searchable), itemprop: 'contributor', link_to_search: solr_name("contributor", :facetable)
-
#config.add_index_field solr_name("proxy_depositor", :symbol), label: "Depositor", helper_method: :link_to_profile
-
#config.add_index_field solr_name("depositor"), label: "Owner", helper_method: :link_to_profile
-
#config.add_index_field solr_name("publisher", :stored_searchable), itemprop: 'publisher', link_to_search: solr_name("publisher", :facetable)
-
#config.add_index_field solr_name("based_near_label", :stored_searchable), itemprop: 'contentLocation', link_to_search: solr_name("based_near_label", :facetable)
-
#config.add_index_field solr_name("language", :stored_searchable), itemprop: 'inLanguage', link_to_search: solr_name("language", :facetable)
-
#config.add_index_field solr_name("date_uploaded", :stored_sortable, type: :date), itemprop: 'datePublished', helper_method: :human_readable_date
-
#config.add_index_field solr_name("date_modified", :stored_sortable, type: :date), itemprop: 'dateModified', helper_method: :human_readable_date
-
#config.add_index_field solr_name("date_created", :stored_searchable), itemprop: 'dateCreated', helper_method: :human_readable_date
-
#config.add_index_field solr_name("rights_license", :stored_searchable), helper_method: :rights_license_links
-
#config.add_index_field solr_name("rights_statement", :stored_searchable), helper_method: :rights_statement_links
-
#config.add_index_field solr_name("license", :stored_searchable), helper_method: :license_links
-
#config.add_index_field "total_file_size_lts", label: "Total File Size", helper_method: :human_readable_file_size
-
#config.add_index_field solr_name("resource_type", :stored_searchable), label: "Resource Type", link_to_search: solr_name("resource_type", :facetable)
-
#config.add_index_field solr_name("file_format", :stored_searchable), link_to_search: solr_name("file_format", :facetable)
-
#config.add_index_field solr_name("identifier", :stored_searchable), helper_method: :index_field_link, field_name: 'identifier'
-
#config.add_index_field solr_name("embargo_release_date", :stored_sortable, type: :date), label: "Embargo release date", helper_method: :human_readable_date
-
#config.add_index_field solr_name("lease_expiration_date", :stored_sortable, type: :date), label: "Lease expiration date", helper_method: :human_readable_date
-
-
#To be able to sort by title
-
1
config.add_index_field solr_name("title", :stored_sortable, type: :string), label: "Title"
-
-
# solr fields to be displayed in the show (single result) view
-
# The ordering of the field names is the order of the display
-
1
config.add_show_field solr_name("title", :stored_searchable)
-
1
config.add_show_field solr_name("description", :stored_searchable)
-
1
config.add_show_field solr_name("keyword", :stored_searchable)
-
1
config.add_show_field solr_name("subject", :stored_searchable)
-
1
config.add_show_field solr_name("subject_discipline", :stored_searchable)
-
1
config.add_show_field solr_name("creator", :stored_searchable)
-
1
config.add_show_field solr_name("contributor", :stored_searchable)
-
1
config.add_show_field solr_name("publisher", :stored_searchable)
-
1
config.add_show_field solr_name("based_near_label", :stored_searchable)
-
1
config.add_show_field solr_name("language", :stored_searchable)
-
1
config.add_show_field solr_name("date_created", :stored_searchable)
-
1
config.add_show_field solr_name("date_modified", :stored_searchable)
-
1
config.add_show_field solr_name("date_published", :stored_searchable), label: "Published"
-
1
config.add_show_field "date_published_dtsim"
-
1
config.add_show_field solr_name("date_uploaded", :stored_searchable)
-
1
config.add_show_field solr_name("rights_license", :stored_searchable)
-
1
config.add_show_field solr_name("rights_statement", :stored_searchable)
-
1
config.add_show_field solr_name("license", :stored_searchable)
-
1
config.add_show_field "total_file_size_lts"
-
1
config.add_show_field solr_name("resource_type", :stored_searchable), label: "Resource Type"
-
1
config.add_show_field solr_name("format", :stored_searchable)
-
1
config.add_show_field solr_name("identifier", :stored_searchable)
-
1
config.add_show_field solr_name("referenced_by", :stored_searchable), label: "Citation to related publication"
-
-
1
config.add_show_field solr_name("date_coverage", :stored_searchable), label: "Date Coverage"
-
-
# "fielded" search configuration. Used by pulldown among other places.
-
# For supported keys in hash, see rdoc for Blacklight::SearchFields
-
#
-
# Search fields will inherit the :qt solr request handler from
-
# config[:default_solr_parameters], OR can specify a different one
-
# with a :qt key/value. Below examples inherit, except for subject
-
# that specifies the same :qt as default for our own internal
-
# testing purposes.
-
#
-
# The :key is what will be used to identify this BL search field internally,
-
# as well as in URLs -- so changing it after deployment may break bookmarked
-
# urls. A display label will be automatically calculated from the :key,
-
# or can be specified manually to be different.
-
#
-
# This one uses all the defaults set by the solr request handler. Which
-
# solr request handler? The one set in config[:default_solr_parameters][:qt],
-
# since we aren't specifying it otherwise.
-
1
config.add_search_field('all_fields', label: 'All Fields') do |field|
-
1
all_names = config.show_fields.values.map(&:field).join(" ")
-
1
title_name = solr_name("title", :stored_searchable)
-
1
field.solr_parameters = {
-
qf: "#{all_names} file_format_tesim all_text_timv",
-
pf: title_name.to_s
-
}
-
end
-
-
# Now we see how to over-ride Solr request handler defaults, in this
-
# case for a BL "search field", which is really a dismax aggregate
-
# of Solr search fields.
-
# creator, title, description, publisher, date_created,
-
# subject, language, resource_type, format, identifier, based_near,
-
-
1
config.add_search_field('based_near') do |field|
-
1
field.label = "Location"
-
1
solr_name = solr_name("based_near_label", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('contributor') do |field|
-
# solr_parameters hash are sent to Solr as ordinary url query params.
-
-
# :solr_local_parameters will be sent using Solr LocalParams
-
# syntax, as eg {! qf=$title_qf }. This is neccesary to use
-
# Solr parameter de-referencing like $title_qf.
-
# See: http://wiki.apache.org/solr/LocalParams
-
1
solr_name = solr_name("contributor", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('creator') do |field|
-
1
solr_name = solr_name("creator", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('date_created') do |field|
-
1
solr_name = solr_name("created", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('depositor') do |field|
-
1
solr_name = solr_name("depositor", :symbol)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('description') do |field|
-
1
field.label = "Abstract or Summary"
-
1
solr_name = solr_name("description", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('doi') do |field|
-
1
field.label = "Doi"
-
1
solr_name = solr_name("doi_label", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('format') do |field|
-
1
solr_name = solr_name("format", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('fundedby') do |field|
-
1
field.label = "Funded By"
-
1
solr_name = solr_name("fundedby_label", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('access_deepblue') do |field|
-
1
field.label = "Article access to DataCORE"
-
1
solr_name = solr_name("access_deepblue_label", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('fundedby_other') do |field|
-
1
field.label = "Funded By Other"
-
1
solr_name = solr_name("fundedby_other_label", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('grantnumber') do |field|
-
1
field.label = "Grant number"
-
1
solr_name = solr_name("grantnumber_label", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('identifier') do |field|
-
1
solr_name = solr_name("id", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('keyword') do |field|
-
1
solr_name = solr_name("keyword", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('language') do |field|
-
1
solr_name = solr_name("language", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('license') do |field|
-
1
solr_name = solr_name("license", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('methodology') do |field|
-
1
field.label = "Methodology"
-
1
solr_name = solr_name("methodology_label", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('prior_identifier') do |field|
-
1
field.label = "Prior Identifier"
-
1
solr_name = solr_name("prior_identifier", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('publisher') do |field|
-
1
solr_name = solr_name("publisher", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('referenced_by') do |field|
-
1
field.label = "Citation to related publication"
-
1
solr_name = solr_name("referenced_by", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('resource_type') do |field|
-
1
solr_name = solr_name("resource_type", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('rights_license') do |field|
-
1
solr_name = solr_name("rights_license", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('rights_license_other') do |field|
-
1
solr_name = solr_name("rights_license_other", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('rights_statement') do |field|
-
1
solr_name = solr_name("rights_statement", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('subject') do |field|
-
1
solr_name = solr_name("subject", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('subject_discipline') do |field|
-
1
solr_name = solr_name("subject_discipline", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('title') do |field|
-
1
solr_name = solr_name("title", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
1
config.add_search_field('total_file_size') do |field|
-
1
solr_name = solr_name("total_file_size", :stored_searchable)
-
1
field.solr_local_parameters = {
-
qf: solr_name,
-
pf: solr_name
-
}
-
end
-
-
# "sort results by" select (pulldown)
-
# label in pulldown is followed by the name of the SOLR field to sort by and
-
# whether the sort is ascending or descending (it must be asc or desc
-
# except in the relevancy case).
-
# label is key, solr field is value
-
1
config.add_sort_field "score desc, #{modified_field} desc", label: "relevance"
-
1
config.add_sort_field "#{uploaded_field} desc", label: "date created \u25BC"
-
1
config.add_sort_field "#{uploaded_field} asc", label: "date created \u25B2"
-
1
config.add_sort_field "#{modified_field} desc", label: "last modified \u25BC"
-
1
config.add_sort_field "#{modified_field} asc", label: "last modified \u25B2"
-
-
# Need to reindex the collection to be able to use these.
-
# config.add_sort_field "title_ssi desc", label: "title \u25BC"
-
# config.add_sort_field "title_ssi asc", label: "title \u25B2"
-
-
# If there are more than this many search results, no spelling ("did you
-
# mean") suggestion is offered.
-
1
config.spell_max = 5
-
end
-
-
# disable the bookmark control from displaying in gallery view
-
# Hyrax doesn't show any of the default controls on the list view, so
-
# this method is not called in that context.
-
1
# Suppress the bookmark checkbox in gallery view. Hyrax's list view
# renders none of the default document controls, so this predicate is
# never consulted there.
def render_bookmarks_control?
  false
end
-
-
end
-
# frozen_string_literal: true

module Deepblue

  # Layers DeepBlue behavior on top of Hyrax::WorksControllerBehavior:
  # permission-aware redirects after an update, and date-coverage
  # decomposition when the edit form is built.
  module WorksControllerBehavior
    extend ActiveSupport::Concern
    #in umrdr
    #include Hyrax::Controller
    include Hyrax::WorksControllerBehavior
    include Deepblue::ControllerWorkflowEventBehavior

    # After a successful update, decide where to send the user:
    # - to the copy-access page when permissions changed and files exist,
    # - to the confirmation page when visibility changed and files exist,
    # - otherwise back to the work (HTML) or a JSON :show response.
    def after_update_response
      has_file_sets = curation_concern.file_sets.present?
      if has_file_sets && permissions_changed?
        return redirect_to main_app.copy_access_hyrax_permission_path(curation_concern)
      end
      if has_file_sets && curation_concern.visibility_changed?
        return redirect_to main_app.confirm_hyrax_permission_path(curation_concern)
      end
      respond_to do |wants|
        wants.html { redirect_to [main_app, curation_concern], notice: "Work \"#{curation_concern}\" successfully updated." }
        wants.json { render :show, status: :ok, location: polymorphic_path([main_app, curation_concern]) }
      end
    end

    # Override of the Hyrax form builder: split the EDTF date_coverage
    # value into the multi-part parameters the edit form expects.
    def build_form
      super
      coverage_interval = Date.edtf(@form.date_coverage)
      interval_params = Dataset::DateCoverageService.interval_to_params coverage_interval
      @form.merge_date_coverage_attributes! interval_params
    end

  end

end
-
1
module Hyrax
  # Builds the breadcrumb trail rendered above controller views.
  module Breadcrumbs
    extend ActiveSupport::Concern

    # Entry point: choose a trail depending on whether the request
    # carries a referer.
    def build_breadcrumbs
      request.referer ? trail_from_referer : default_trail
    end

    # Common leading crumbs: Home, plus Dashboard for signed-in users.
    def default_trail_start
      add_breadcrumb I18n.t('hyrax.controls.home'), hyrax.root_path
      add_breadcrumb I18n.t('hyrax.dashboard.title'), hyrax.dashboard_path if user_signed_in?
    end

    # Trail used when no referer is available (or it is nil). Note that
    # `case nil` never matches the /catalog/ regex, so the else branch
    # is taken for a missing referer.
    def default_trail
      case request.referer
      when /catalog/
        default_trail_start
      else
        default_trail_start
        add_breadcrumb_for_controller if user_signed_in?
        begin
          # Can raise when a parent record is missing; a broken crumb
          # should not take down the whole page.
          add_breadcrumb_for_action
        rescue
          # noop
        end
      end
    end

    # Trail derived from the referer: catalog referers get a link back
    # to the search results, anything else gets the standard trail.
    def trail_from_referer
      case request.referer
      when /catalog/
        add_breadcrumb I18n.t('hyrax.controls.home'), hyrax.root_path
        add_breadcrumb I18n.t('hyrax.bread_crumb.search_results'), request.referer
      else
        default_trail_start
        add_breadcrumb_for_controller if user_signed_in?
        add_breadcrumb_for_action
      end
    end

    # Hook for including controllers to override.
    def add_breadcrumb_for_controller; end

    # Hook for including controllers to override.
    def add_breadcrumb_for_action; end
  end
end
-
-
1
module Hyrax

  # Shared behavior for collection controllers: configures the presenter
  # and search-builder classes, and runs the member/subcollection/parent
  # queries that back the show view.
  module CollectionsControllerBehavior
    extend ActiveSupport::Concern
    include Blacklight::AccessControls::Catalog
    include Blacklight::Base

    included do
      # include the display_trophy_link view helper method
      helper Hyrax::TrophyHelper

      # This is needed as of BL 3.7
      copy_blacklight_config_from(::CatalogController)

      class_attribute :presenter_class,
                      :form_class,
                      :single_item_search_builder_class,
                      :membership_service_class

      self.presenter_class = Hyrax::CollectionPresenter

      # The search builder to find the collection
      self.single_item_search_builder_class = SingleCollectionSearchBuilder
      # The search builder to find the collections' members
      self.membership_service_class = Collections::CollectionMemberService
    end

    # The CRUD overrides below defer entirely to the superclass; they
    # exist as hook points (historically for debug logging).
    def create
      super
    end

    def destroy
      super
    end

    def edit
      super
    end

    # Load the collection, build its presenter, and run the member
    # queries the show template needs.
    def show
      @curation_concern ||= ActiveFedora::Base.find(params[:id])
      presenter
      query_collection_members
    end

    def update
      super
    end

    # The show action works from the presenter; other actions use the
    # loaded @collection.
    def collection
      action_name == 'show' ? @presenter : @collection
    end

    private

    # Query Solr for the collection and wrap the first hit in the
    # configured presenter class; deny access when nothing comes back.
    def presenter
      @presenter ||= begin
        solr_response = repository.search(single_item_search_builder.query)
        doc = solr_response.documents.first
        raise CanCan::AccessDenied unless doc
        presenter_class.new(doc, current_ability)
      end
    end

    # Instantiates the search builder that builds a query for a single
    # item; useful in the show view.
    def single_item_search_builder
      single_item_search_builder_class.new(self).with(params.except(:q, :page))
    end

    def collection_params
      form_class.model_attributes(params[:collection])
    end

    # Include 'catalog' and 'hyrax/base' in the view lookup path while
    # preferring our local paths. Thus we are unable to just override
    # `self.local_prefixes`.
    def _prefixes
      @_prefixes ||= super + ['catalog', 'hyrax/base']
    end

    # Run the member queries; subcollections and parents only apply to
    # nestable collection types.
    def query_collection_members
      member_works
      member_subcollections if collection.collection_type.nestable?
      parent_collections if collection.collection_type.nestable? && action_name == 'show'
    end

    # Instantiate the membership query service
    def collection_member_service
      @collection_member_service ||= membership_service_class.new(scope: self, collection: collection, params: params_for_query)
    end

    def member_works
      @response = collection_member_service.available_member_works
      @member_docs = @response.documents
      @members_count = @response.total
    end

    def parent_collections
      page = params[:parent_collection_page].to_i
      query = Hyrax::Collections::NestedCollectionQueryService
      collection.parent_collections = query.parent_collections(child: collection_object, scope: self, page: page)
    end

    # In the show action `collection` is a presenter, so re-fetch the
    # model; elsewhere the loaded collection is already a model.
    def collection_object
      action_name == 'show' ? Collection.find(collection.id) : collection
    end

    def member_subcollections
      results = collection_member_service.available_member_subcollections
      @subcollection_solr_response = results
      @subcollection_docs = results.documents
      @subcollection_count = @presenter.subcollection_count = results.total
    end

    # You can override this method if you need to provide additional
    # inputs to the search builder. For example: search_field: 'all_fields'
    # @return [Hash] the inputs required for the collection member query service
    def params_for_query
      params.merge(q: params[:cq])
    end

  end

end
-
# frozen_string_literal: true

require File.join( Gem::Specification.find_by_name("hyrax").full_gem_path, "app/controllers/concerns/hyrax/works_controller_behavior.rb" )

module Hyrax

  # Monkey patch of Hyrax::WorksControllerBehavior: friendlier handling
  # when a requested work is missing, suppressed, or was deleted.
  module WorksControllerBehavior

    private

    # Look the work up through the normal search pipeline so access
    # controls and suppression filtering apply.
    def curation_concern_from_search_results
      search_params = params
      search_params.delete :page
      search_result_document(search_params)
    end

    # Only returns unsuppressed documents the user has read access to.
    # On a miss, distinguish three cases before giving up:
    #   - anonymous user: generic guest message page,
    #   - record deleted in Fedora (Ldp::Gone): admins go to the
    #     provenance log,
    #   - record never existed: admins go to the provenance log too.
    def search_result_document(search_params)
      _, document_list = search_results(search_params)
      return document_list.first unless document_list.empty?
      document_not_found!
    rescue Blacklight::Exceptions::RecordNotFound => e
      return redirect_to guest_user_message_url, alert: "unable to present requested work" unless user_signed_in?
      begin
        # Ask Fedora whether the requested id ever existed.
        id = params[:id]
        ActiveFedora::Base.find( id )
      rescue Ldp::Gone => gone
        # The object was deleted; admins may inspect its provenance log.
        if current_ability.admin?
          return redirect_to( provenance_log_url, alert: "\"#{id}\" was deleted." )
        end
      rescue ActiveFedora::ObjectNotFoundError => e2
        # The object never existed.
        if current_ability.admin?
          return redirect_to( provenance_log_url, alert: "\"#{id}\" not found." )
        end
      end
      return redirect_to( guest_user_message_url, alert: "unable to present requested work" )
    end

    # Raise the exception matching why the document is unavailable:
    # workflow suppression for readable-but-suppressed docs, access
    # denial otherwise.
    def document_not_found!
      doc = ::SolrDocument.find(params[:id])
      raise WorkflowAuthorizationException if doc.suppressed? && current_ability.can?(:read, doc)
      raise CanCan::AccessDenied.new(nil, :show)
    end

    # Path of the provenance-log page for the requested id.
    def provenance_log_url
      id = params[:id]
      Rails.application.routes.url_helpers.url_for( only_path: true,
                                                    action: 'show',
                                                    controller: 'provenance_log',
                                                    id: id )
    end

    # Path of the guest-user message page (flash is lost across the
    # double redirect for anonymous users, hence a dedicated page).
    def guest_user_message_url
      Rails.application.routes.url_helpers.url_for( only_path: true,
                                                    action: 'show',
                                                    controller: 'guest_user_message' )
    end

  end

end
-
# frozen_string_literal: true

module Deepblue

  # Provenance logging and publish/unpublish workflow hooks shared by
  # collection controllers.
  module CollectionsControllerBehavior

    include Deepblue::ControllerWorkflowEventBehavior

    PARAMS_KEY = 'collection'

    ## Provenance log

    # after_action hook: record the attribute changes snapshotted by
    # provenance_log_update_before.
    def provenance_log_update_after
      curation_concern.provenance_log_update_after( current_user: current_user,
                                                    event_note: default_event_note,
                                                    update_attr_key_values: @update_attr_key_values )
    end

    # before_action hook: snapshot incoming form attributes exactly once
    # so the after hook can diff against them.
    def provenance_log_update_before
      return unless @update_attr_key_values.nil?
      @update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[params_key].dup )
    end

    ## end Provenance log

    ## visibility / publish

    # NOTE(review): "visiblity" is misspelled, but the name is referenced
    # by before_action callbacks in including controllers; renaming would
    # break those callers, so it stays.
    def visiblity_changed
      @update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[PARAMS_KEY].dup )
      if visibility_to_private?
        mark_as_set_to_private
      elsif visibility_to_public?
        mark_as_set_to_public
      end
    end

    # after_action hook: fire the matching workflow event once the
    # visibility change flagged earlier has actually taken effect.
    def visibility_changed_update
      if curation_concern.private? && @visibility_changed_to_private
        workflow_unpublish
      elsif curation_concern.public? && @visibility_changed_to_public
        workflow_publish
      end
    end

    # True when a currently non-private record is being set private.
    def visibility_to_private?
      return false if curation_concern.private?
      params[params_key]['visibility'] == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
    end

    # True when a currently non-public record is being set public.
    def visibility_to_public?
      return false if curation_concern.public?
      params[params_key]['visibility'] == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
    end

    def mark_as_set_to_private
      @visibility_changed_to_public = false
      @visibility_changed_to_private = true
    end

    def mark_as_set_to_public
      @visibility_changed_to_public = true
      @visibility_changed_to_private = false
    end

    ## end visibility / publish

  end

end
-
# frozen_string_literal: true

module Deepblue

  # Forwards controller lifecycle events (create, destroy, publish,
  # unpublish) to the curation concern's workflow log, tracing each
  # trigger site first.
  #
  # The bold_debug preambles are intentionally left inline (not factored
  # into a helper): LoggingHelper.called_from is presumably stack-frame
  # sensitive, so moving the call would change what gets logged.
  module ControllerWorkflowEventBehavior

    def workflow_create
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "" ]
      curation_concern.workflow_create( current_user: current_user,
                                        event_note: "#{self.class.name} - deposited by #{curation_concern.depositor}" )
    end

    def workflow_destroy
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "" ]
      curation_concern.workflow_destroy( current_user: current_user, event_note: "#{self.class.name}" )
    end

    def workflow_publish
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "" ]
      curation_concern.workflow_publish( current_user: current_user, event_note: "#{self.class.name}" )
    end

    def workflow_unpublish
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "" ]
      curation_concern.workflow_unpublish( current_user: current_user, event_note: "#{self.class.name}" )
    end

    # Hook for including controllers to override; intentionally a no-op.
    def workflow_update_before( current_user:, event_note: "" )

    end

    # Hook for including controllers to override; intentionally a no-op.
    def workflow_update_after( current_user:, event_note: "" )

    end

  end

end
-
# frozen_string_literal: true

# Guest users (not logged in) are redirected twice on their way back to
# the main app window, which loses the flash message; this controller
# renders a dedicated page that displays the message instead.
class GuestUserMessageController < ApplicationController

  class_attribute :presenter_class
  self.presenter_class = GuestUserMessagePresenter

  # Render the standalone guest-message page.
  def show
    @presenter = presenter_class.new( controller: self )
    render '/guest_user_message'
  end

end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
-
1
class CollectionsController < DeepblueController
-
-
1
EVENT_NOTE = 'Hyrax::CollectionsController'
-
1
PARAMS_KEY = 'collection'
-
-
1
include Hyrax::CollectionsControllerBehavior
-
1
include Deepblue::ControllerWorkflowEventBehavior
-
1
include BreadcrumbsForCollections
-
-
1
before_action :deepblue_collections_controller_debug
-
-
1
before_action :workflow_destroy, only: [:destroy]
-
1
before_action :provenance_log_update_before, only: [:update]
-
1
before_action :visiblity_changed, only: [:update]
-
-
1
after_action :workflow_create, only: [:create]
-
1
after_action :provenance_log_update_after, only: [:update]
-
1
after_action :visibility_changed_update, only: [:update]
-
-
1
protect_from_forgery with: :null_session, only: [:display_provenance_log]
-
-
1
with_themed_layout :decide_layout
-
1
load_and_authorize_resource except: %i[index show create], instance_name: :collection
-
-
1
def deepblue_collections_controller_debug
-
::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
Deepblue::LoggingHelper.called_from,
-
"params=#{params}" ]
-
end
-
-
# Renders a JSON response with a list of files in this collection
-
# This is used by the edit form to populate the thumbnail_id dropdown
-
1
def files
-
result = form.select_files.map do |label, id|
-
{ id: id, text: label }
-
end
-
render json: result
-
end
-
-
1
def curation_concern
-
@collection ||= ActiveFedora::Base.find(params[:id])
-
end
-
-
## Provenance log
-
-
1
def provenance_log_update_after
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "" ]
-
curation_concern.provenance_log_update_after( current_user: current_user,
-
# event_note: 'CollectionsController.provenance_log_update_after',
-
update_attr_key_values: @update_attr_key_values )
-
end
-
-
1
def provenance_log_update_before
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "@update_attr_key_values=#{@update_attr_key_values}",
-
# "" ]
-
return unless @update_attr_key_values.nil?
-
@update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[PARAMS_KEY].dup )
-
end
-
-
## end Provenance log
-
-
## display provenance log
-
-
1
def display_provenance_log
-
# load provenance log for this work
-
id = @collection.id # curation_concern.id
-
file_path = Deepblue::ProvenancePath.path_for_reference( id )
-
Deepblue::LoggingHelper.bold_debug [ "CollectionsController", "display_provenance_log", file_path ]
-
Deepblue::ProvenanceLogService.entries( id, refresh: true )
-
# continue on to normal display
-
#redirect_to [main_app, curation_concern]
-
redirect_back fallback_location: root_url
-
end
-
-
1
def display_provenance_log_enabled?
-
true
-
end
-
-
1
def provenance_log_entries_present?
-
provenance_log_entries.present?
-
end
-
-
## end display provenance log
-
-
## visibility / publish
-
-
1
def visiblity_changed
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "" ]
-
@update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[PARAMS_KEY].dup )
-
if visibility_to_private?
-
mark_as_set_to_private
-
elsif visibility_to_public?
-
mark_as_set_to_public
-
end
-
end
-
-
1
def visibility_changed_update
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "" ]
-
if curation_concern.private? && @visibility_changed_to_private
-
workflow_unpublish
-
elsif curation_concern.public? && @visibility_changed_to_public
-
workflow_publish
-
end
-
end
-
-
1
def visibility_to_private?
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "" ]
-
return false if curation_concern.private?
-
params[PARAMS_KEY]['visibility'] == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
-
end
-
-
1
def visibility_to_public?
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "" ]
-
return false if curation_concern.public?
-
params[PARAMS_KEY]['visibility'] == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
-
end
-
-
1
# Flag this update as a public -> private transition.
def mark_as_set_to_private
  @visibility_changed_to_private = true
  @visibility_changed_to_public = false
end

# Flag this update as a private -> public transition.
def mark_as_set_to_public
  @visibility_changed_to_private = false
  @visibility_changed_to_public = true
end
-
-
## end visibility / publish
-
-
1
private
-
-
1
# Memoized edit form for the current collection.
# NOTE(review): assumes form_class, @collection, current_ability and
# repository are provided by the enclosing controller (header not visible
# in this chunk) — confirm.
def form
  @form ||= form_class.new( @collection, current_ability, repository )
end
-
-
1
# Pick the page layout for the current action: the 'show' action renders in
# a single column; everything else uses the dashboard layout. The result is
# namespaced under the current theme.
def decide_layout
  layout = action_name == 'show' ? '1_column' : 'dashboard'
  File.join( theme, layout )
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require File.join( Gem::Specification.find_by_name("hyrax").full_gem_path, "app/controllers/hyrax/dashboard/collections_controller.rb" )
-
-
1
module Hyrax
-
-
1
module Dashboard
-
-
# monkey patch Hyrax::Dashboard::CollectionsController
-
-
## Shows a list of all collections to the admins
-
1
class CollectionsController < Hyrax::My::CollectionsController
-
1
include ::Hyrax::BrandingHelper
-
1
include Deepblue::CollectionsControllerBehavior
-
-
1
EVENT_NOTE = 'Hyrax::Dashboard::CollectionsController'
-
1
PARAMS_KEY = 'collection'
-
-
## begin monkey patch overrides
-
-
1
alias_method :monkey_after_create, :after_create
-
1
alias_method :monkey_destroy, :destroy
-
-
1
# Monkey-patch override: run Hyrax's stock after_create (aliased as
# monkey_after_create), then fire the workflow_create hook for the new
# collection.
def after_create
  monkey_after_create
  workflow_create
end

# Monkey-patch override: run the workflow_destroy hook BEFORE delegating to
# Hyrax's stock destroy, while the record still exists.
def destroy
  workflow_destroy
  monkey_destroy
end
-
-
1
# Monkey-patch override of the stock show action: additionally resolves the
# collection's banner image (when its collection type supports branding)
# before building the presenter and loading member works.
def show
  if @collection.collection_type.brandable?
    banner_info = collection_banner_info( id: @collection.id )
    # brand_path resolves the servable path for the stored banner record.
    @banner = brand_path( collection_branding_info: banner_info.first ) unless banner_info.empty?
  end

  presenter
  query_collection_members
end
-
-
## end monkey patch overrides
-
-
1
before_action :provenance_log_update_before, only: [:update]
-
1
after_action :provenance_log_update_after, only: [:update]
-
-
1
# The collection currently being acted on, loaded lazily from Fedora.
def curation_concern
  @collection ||= ActiveFedora::Base.find(params[:id])
end

# Event-note label used when writing provenance entries from this controller.
def default_event_note
  EVENT_NOTE
end

# Key under which this controller's form attributes arrive in params.
def params_key
  PARAMS_KEY
end
-
-
## begin monkey patch banner
-
-
1
# Monkey-patched banner handling: either re-save the existing banner (when
# the form flags it unchanged) or replace it with the newly uploaded file.
def process_banner_input
  return update_existing_banner if params["banner_unchanged"] == "true"
  remove_banner
  new_upload_ids = params["banner_files"]
  add_new_banner( new_upload_ids ) if new_upload_ids
end
-
-
1
# Re-save the collection's existing banner record so it survives the update.
# Fixed: guards against no banner record existing — the form can claim
# "banner_unchanged" even when branding was never set up, and the old code
# then raised NoMethodError on nil (banner_info.first).
def update_existing_banner
  banner_info = collection_banner_info( id: @collection.id )
  return if banner_info.blank?
  existing = banner_info.first
  existing.save( existing.local_path, false )
end
-
-
1
# Create and persist a CollectionBrandingInfo "banner" record for the first
# of the given uploaded files.
# @param uploaded_file_ids [Array] ids of uploaded-file records; only the
#   first is used (the UI uploads a single banner).
def add_new_banner(uploaded_file_ids)
  f = uploaded_files(uploaded_file_ids).first
  banner_info = CollectionBrandingInfo.new(
    collection_id: @collection.id,
    filename: File.split(f.file_url).last,
    role: "banner",
    alt_txt: "",
    target_url: ""
  )
  banner_info.save f.file_url
end
-
-
1
# Delete all stored banner branding records for this collection, if any.
def remove_banner
  banner_info = collection_banner_info( id: @collection.id )
  banner_info&.delete_all
end
-
-
## end monkey patch banner
-
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
-
1
class DataSetsController < DeepblueController
-
-
1
PARAMS_KEY = 'data_set'
-
-
1
include Deepblue::WorksControllerBehavior
-
-
1
self.curation_concern_type = ::DataSet
-
1
self.show_presenter = Hyrax::DataSetPresenter
-
-
1
before_action :assign_date_coverage, only: %i[create update]
-
1
before_action :assign_admin_set, only: %i[create update]
-
1
before_action :workflow_destroy, only: [:destroy]
-
1
before_action :provenance_log_update_before, only: [:update]
-
1
before_action :visiblity_changed, only: [:update]
-
1
before_action :prepare_permissions, only: [:show]
-
-
1
after_action :workflow_create, only: [:create]
-
1
after_action :visibility_changed_update, only: [:update]
-
1
after_action :provenance_log_update_after, only: [:update]
-
1
after_action :reset_permissions, only: [:show]
-
-
1
protect_from_forgery with: :null_session, only: [:display_provenance_log]
-
1
protect_from_forgery with: :null_session, only: [:globus_add_email]
-
1
protect_from_forgery with: :null_session, only: [:globus_download]
-
1
protect_from_forgery with: :null_session, only: [:globus_download_add_email]
-
1
protect_from_forgery with: :null_session, only: [:globus_download_notify_me]
-
1
protect_from_forgery with: :null_session, only: [:zip_download]
-
-
1
attr_accessor :user_email_one, :user_email_two
-
-
1
attr_accessor :provenance_log_entries
-
-
# These methods (prepare_permissions, and reset_permissions) are used so that
-
# when viewing a tombstoned work, and the user is not admin, the user
-
# will be able to see the metadata.
-
1
# before_action for :show — when a non-admin views a tombstoned work,
# temporarily grant the "admin" group so the presenter (and thus the
# metadata) is accessible. The grant is kept only when the work is actually
# tombstoned; reset_permissions (after_action) revokes it after :show.
def prepare_permissions
  return if current_ability.admin?
  # The admin group must be present for the presenter to be accessible at all.
  current_ability.user_groups << "admin"
  current_ability.user_groups.delete( "admin" ) unless presenter&.tombstone.present?
end

# after_action for :show — revoke the temporary "admin" grant made by
# prepare_permissions.
def reset_permissions
  current_ability.user_groups.delete( "admin" )
end
-
-
-
## box integration
-
-
1
# Create the Box directory for this work and add the current user as a
# collaborator. No-op (returns nil) unless Box integration is enabled.
def box_create_dir_and_add_collaborator
  return nil unless DeepBlueDocs::Application.config.box_integration_enabled
  user_email = Deepblue::EmailHelper.user_email_from( current_user )
  BoxHelper.create_dir_and_add_collaborator( curation_concern.id, user_email: user_email )
end

# @return [String, nil] Box link for this work; nil when integration is off
def box_link
  return nil unless DeepBlueDocs::Application.config.box_integration_enabled
  BoxHelper.box_link( curation_concern.id )
end

# Hook called after work creation to provision the Box directory.
def box_work_created
  box_create_dir_and_add_collaborator
end
-
-
## end box integration
-
-
## date_coverage
-
-
# Create EDTF::Interval from form parameters
-
# Replace the date coverage parameter prior with serialization of EDTF::Interval
-
1
# before_action for create/update: build an EDTF::Interval from the form's
# date parameters and replace the date_coverage param with its serialization
# (empty string when no interval could be built).
def assign_date_coverage
  interval = Dataset::DateCoverageService.params_to_interval params
  params[PARAMS_KEY]['date_coverage'] = interval ? interval.edtf : ""
end

# before_action for create/update: point the deposit at a depositable admin
# set other than the default one. As in the original, the last non-default
# match wins when several are returned.
def assign_admin_set
  Hyrax::AdminSetService.new( self ).search_results( :deposit ).each do |admin_set|
    next if admin_set.id == "admin_set/default"
    params[PARAMS_KEY]['admin_set_id'] = admin_set.id
  end
end
-
-
# end date_coverage
-
-
## DOI
-
-
1
# Action: kick off DOI minting for this work, then redirect back to the work
# (HTML) or render :show (JSON). doi_mint sets the flash explaining the
# outcome.
def doi
  doi_mint
  respond_to do |wants|
    wants.html { redirect_to [main_app, curation_concern] }
    wants.json do
      render :show,
             status: :ok,
             location: polymorphic_path([main_app, curation_concern])
    end
  end
end
-
-
1
# Feature flag: DOI minting availability, driven by DoiBehavior config.
def doi_minting_enabled?
  ::Deepblue::DoiBehavior::DOI_MINTING_ENABLED
end

# Start DOI minting unless a precondition fails; every branch sets a flash
# notice explaining the outcome. Minting is skipped when: a mint is already
# pending, a DOI already exists, the work has no files, or the caller is
# neither the depositor nor an admin.
def doi_mint
  if curation_concern.doi_pending?
    flash[:notice] = MsgHelper.t( 'data_set.doi_is_being_minted' )
  elsif curation_concern.doi_minted?
    flash[:notice] = MsgHelper.t( 'data_set.doi_already_exists' )
  elsif curation_concern.file_sets.count < 1
    flash[:notice] = MsgHelper.t( 'data_set.doi_requires_work_with_files' )
  elsif ( curation_concern.depositor != current_user.email ) && !current_ability.admin?
    flash[:notice] = MsgHelper.t( 'data_set.doi_user_without_access' )
  elsif curation_concern.doi_mint( current_user: current_user, event_note: 'DataSetsController' )
    flash[:notice] = MsgHelper.t( 'data_set.doi_minting_started' )
  end
end
-
-
# def mint_doi_enabled?
-
# true
-
# end
-
-
## end DOI
-
-
## Globus
-
-
1
# Action: register an email for notification when the Globus copy is ready.
# Signed-in users use their account email; anonymous users must type the
# address twice (mismatch re-renders the form); with no email params at all,
# just report current Globus status.
# NOTE(review): near-duplicate of globus_download_notify_me — candidate for
# a shared private helper.
def globus_add_email
  if user_signed_in?
    user_email = Deepblue::EmailHelper.user_email_from( current_user )
    globus_copy_job( user_email: user_email, delay_per_file_seconds: 0 )
    flash_and_go_back globus_files_prepping_msg( user_email: user_email )
  elsif params[:user_email_one].present? || params[:user_email_two].present?
    user_email_one = params[:user_email_one].present? ? params[:user_email_one].strip : ''
    user_email_two = params[:user_email_two].present? ? params[:user_email_two].strip : ''
    if user_email_one == user_email_two
      globus_copy_job( user_email: user_email_one, delay_per_file_seconds: 0 )
      flash_and_redirect_to_main_cc globus_files_prepping_msg( user_email: user_email_one )
    else
      flash.now[:error] = emails_did_not_match_msg( user_email_one, user_email_two )
      render 'globus_download_add_email_form'
    end
  else
    flash_and_redirect_to_main_cc globus_status_msg
  end
end
-
-
1
# Action: queue a job removing BOTH the prepped and the downloadable Globus
# copies of this work, then report which directories were cleaned.
def globus_clean_download
  ::GlobusCleanJob.perform_later( curation_concern.id, clean_download: true )
  globus_ui_delay
  dirs = []
  dirs << ::GlobusJob.target_download_dir( curation_concern.id )
  dirs << ::GlobusJob.target_prep_dir( curation_concern.id, prefix: nil )
  dirs << ::GlobusJob.target_prep_tmp_dir( curation_concern.id, prefix: nil )
  flash_and_redirect_to_main_cc globus_clean_msg( dirs )
end

# Action: queue cleanup of only the prep directories (downloadable copy kept).
def globus_clean_prep
  ::GlobusCleanJob.perform_later( curation_concern.id, clean_download: false )
  globus_ui_delay
end

# @return [Boolean] true when the Globus copy of this work has finished
def globus_complete?
  ::GlobusJob.copy_complete? curation_concern.id
end

# Queue the background job copying this work's files to Globus, optionally
# notifying user_email when done. delay_per_file_seconds exists to simulate
# slow copies while debugging.
def globus_copy_job( user_email: nil,
                     delay_per_file_seconds: DeepBlueDocs::Application.config.globus_debug_delay_per_file_copy_job_seconds )

  ::GlobusCopyJob.perform_later( curation_concern.id,
                                 user_email: user_email,
                                 delay_per_file_seconds: delay_per_file_seconds )
  globus_ui_delay
end
-
-
1
# Action: entry point for Globus downloads. If the copy is ready, point the
# user at it; otherwise start/track the copy and either notify the signed-in
# user or ask an anonymous user for a notification email.
def globus_download
  if globus_complete?
    flash_and_redirect_to_main_cc globus_files_available_here
  else
    user_email = Deepblue::EmailHelper.user_email_from( current_user, user_signed_in: user_signed_in? )
    msg = if globus_prepping?
            globus_files_prepping_msg( user_email: user_email )
          else
            globus_file_prep_started_msg( user_email: user_email )
          end
    if user_signed_in?
      globus_copy_job( user_email: user_email )
      flash_and_redirect_to_main_cc msg
    else
      render 'globus_download_notify_me_form'
    end
  end
end
-
-
1
# Action: signed-in users are delegated to globus_add_email; anonymous users
# get the email-entry form.
def globus_download_add_email
  if user_signed_in?
    globus_add_email
  else
    render 'globus_download_add_email_form'
  end
end

# Feature flag: Globus downloads (overrides DeepblueController's false).
def globus_download_enabled?
  DeepBlueDocs::Application.config.globus_enabled
end

# Action: start the Globus copy and arrange an email notification. Mirrors
# globus_add_email but uses "prep started" messaging and also handles the
# no-email case by starting an anonymous copy.
# NOTE(review): near-duplicate of globus_add_email — candidate for a shared
# private helper.
def globus_download_notify_me
  if user_signed_in?
    user_email = Deepblue::EmailHelper.user_email_from( current_user )
    globus_copy_job( user_email: user_email )
    flash_and_go_back globus_file_prep_started_msg( user_email: user_email )
  elsif params[:user_email_one].present? || params[:user_email_two].present?
    user_email_one = params[:user_email_one].present? ? params[:user_email_one].strip : ''
    user_email_two = params[:user_email_two].present? ? params[:user_email_two].strip : ''
    if user_email_one == user_email_two
      globus_copy_job( user_email: user_email_one )
      flash_and_redirect_to_main_cc globus_file_prep_started_msg( user_email: user_email_one )
    else
      flash.now[:error] = emails_did_not_match_msg( user_email_one, user_email_two )
      render 'globus_download_notify_me_form'
    end
  else
    globus_copy_job( user_email: nil )
    flash_and_redirect_to_main_cc globus_file_prep_started_msg
  end
end

# Feature flag: overall Globus integration.
def globus_enabled?
  DeepBlueDocs::Application.config.globus_enabled
end

# @return contents of the Globus error file for this work
def globus_last_error_msg
  ::GlobusJob.error_file_contents curation_concern.id
end

# @return [Boolean] true while a Globus copy is still being prepared
def globus_prepping?
  ::GlobusJob.files_prepping? curation_concern.id
end

# Sleep briefly after enqueuing a Globus job so the job has a chance to
# create its status files before the UI re-checks them.
def globus_ui_delay( delay_seconds: DeepBlueDocs::Application.config.globus_after_copy_job_ui_delay_seconds )
  sleep delay_seconds if delay_seconds.positive?
end

# @return the external Globus URL for this work's files
def globus_url
  ::GlobusJob.external_url curation_concern.id
end
-
-
## end Globus
-
-
## Provenance log
-
-
1
# after_action for :update — write the provenance entry using the attribute
# snapshot captured by provenance_log_update_before.
def provenance_log_update_after
  curation_concern.provenance_log_update_after( current_user: current_user,
                                                update_attr_key_values: @update_attr_key_values )
end

# before_action for :update — snapshot the incoming form attributes for the
# provenance log.
# NOTE(review): unlike My::CollectionsController's version, there is no
# "return unless @update_attr_key_values.nil?" guard here, so this can
# overwrite a snapshot taken earlier in the callback chain — confirm
# intended.
def provenance_log_update_before
  @update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[PARAMS_KEY].dup )
end

# Action: (re)load the provenance log for this work, then fall through to
# the normal show page anchored at the provenance-log section.
def display_provenance_log
  file_path = Deepblue::ProvenancePath.path_for_reference( curation_concern.id )
  Deepblue::LoggingHelper.bold_debug [ "DataSetsController", "display_provenance_log", file_path ]
  Deepblue::ProvenanceLogService.entries( curation_concern.id, refresh: true )
  redirect_to polymorphic_url([main_app, curation_concern], anchor: "prov_log")
end

# Feature flag: provenance-log display (overrides DeepblueController).
def display_provenance_log_enabled?
  true
end

# @return [Boolean] true when provenance log entries have been loaded
def provenance_log_entries_present?
  provenance_log_entries.present?
end
-
-
## end display provenance log
-
-
## Tombstone
-
-
1
# Action: tombstone this work with the epitaph supplied in params, recording
# the acting user. Flashes either the tombstone notice or an "already
# tombstoned" message, then returns to the dashboard works list.
def tombstone
  epitaph = params[:tombstone]
  success = curation_concern.entomb!( epitaph, current_user )
  msg = if success
          MsgHelper.t( 'data_set.tombstone_notice', title: curation_concern.title.first.to_s, reason: epitaph.to_s )
        else
          "#{curation_concern.title.first} is already tombstoned."
        end
  redirect_to dashboard_works_path, notice: msg
end

# Feature flag: tombstoning is available for data sets.
def tombstone_enabled?
  true
end
-
-
## End Tombstone
-
-
## visibility / publish
-
-
1
# before_action for :update — record whether this update is a visibility
# transition so visibility_changed_update can fire the matching workflow.
# NOTE(review): method name is misspelled ("visiblity") but is referenced by
# that exact symbol in the before_action wiring; rename both together or not
# at all.
def visiblity_changed
  if visibility_to_private?
    mark_as_set_to_private
  elsif visibility_to_public?
    mark_as_set_to_public
  end
end

# after_action for :update — if the recorded visibility change actually took
# effect on the saved record, run the unpublish/publish workflow.
def visibility_changed_update
  if curation_concern.private? && @visibility_changed_to_private
    workflow_unpublish
  elsif curation_concern.public? && @visibility_changed_to_public
    workflow_publish
  end
end
-
-
1
# True when the form requests a transition to private visibility and the
# record is not already private.
def visibility_to_private?
  return false if curation_concern.private?
  requested_visibility = params[PARAMS_KEY]['visibility']
  requested_visibility == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
end

# True when the form requests a transition to public visibility and the
# record is not already public.
def visibility_to_public?
  return false if curation_concern.public?
  requested_visibility = params[PARAMS_KEY]['visibility']
  requested_visibility == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
end
-
-
1
# Flag this update as a public -> private transition.
def mark_as_set_to_private
  @visibility_changed_to_private = true
  @visibility_changed_to_public = false
end

# Flag this update as a private -> public transition.
def mark_as_set_to_public
  @visibility_changed_to_private = false
  @visibility_changed_to_public = true
end
-
-
## end visibility / publish
-
-
## begin zip download operations
-
-
1
# Action: build a flat zip of the work's files plus a metadata report in a
# per-work temp directory, then stream it to the client. The zip is rebuilt
# from scratch on every request: stale files in the target directory are
# deleted first, and send_file streams the archive left on disk.
def zip_download
  require 'zip'
  require 'tempfile'

  tmp_dir = Settings.tmpdir || '/tmp'
  tmp_dir = Pathname.new tmp_dir
  Deepblue::LoggingHelper.bold_debug [ "zip_download begin", "tmp_dir=#{tmp_dir}" ]
  target_dir = target_dir_name_id( tmp_dir, curation_concern.id )
  Deepblue::LoggingHelper.bold_debug [ "zip_download", "target_dir=#{target_dir}" ]
  Dir.mkdir( target_dir ) unless Dir.exist?( target_dir )
  target_zipfile = target_dir_name_id( target_dir, curation_concern.id, ".zip" )
  Deepblue::LoggingHelper.bold_debug [ "zip_download", "target_zipfile=#{target_zipfile}" ]
  File.delete target_zipfile if File.exist? target_zipfile
  # The zip structure is flat, so clearing stale output only requires
  # deleting the files directly inside the target folder.
  files = Dir.glob( (target_dir.join '*').to_s)
  Deepblue::LoggingHelper.bold_debug files, label: "zip_download files to delete:"
  files.each do |file|
    File.delete file if File.exist? file
  end
  Deepblue::LoggingHelper.debug "Download Zip begin copy to folder #{target_dir}"
  Deepblue::LoggingHelper.bold_debug [ "zip_download", "begin copy target_dir=#{target_dir}" ]
  Zip::File.open(target_zipfile.to_s, Zip::File::CREATE ) do |zipfile|
    # Metadata report first, then each exported file set.
    metadata_filename = curation_concern.metadata_report( dir: target_dir )
    zipfile.add( metadata_filename.basename, metadata_filename )
    export_file_sets_to( target_dir: target_dir, log_prefix: "Zip: " ) do |target_file_name, target_file|
      zipfile.add( target_file_name, target_file )
    end
  end
  Deepblue::LoggingHelper.bold_debug [ "zip_download", "download complete target_dir=#{target_dir}" ]
  send_file target_zipfile.to_s
end

# Feature flag: zip downloads (overrides DeepblueController).
def zip_download_enabled?
  Settings.zip_download_enabled
end
-
-
# end zip download operations
-
-
# # Create EDTF::Interval from form parameters
-
# # Replace the date coverage parameter prior with serialization of EDTF::Interval
-
# def assign_date_coverage
-
# ##cov_interval = Umrdr::DateCoverageService.params_to_interval params
-
# ##params['generic_work']['date_coverage'] = cov_interval ? [cov_interval.edtf] : []
-
# end
-
#
-
# def check_recent_uploads
-
# if params[:uploads_since]
-
# begin
-
# @recent_uploads = [];
-
# uploads_since = Time.at(params[:uploads_since].to_i / 1000.0)
-
# presenter.file_set_presenters.reverse_each do |file_set|
-
# date_uploaded = get_date_uploaded_from_solr(file_set)
-
# if date_uploaded.nil? or date_uploaded < uploads_since
-
# break
-
# end
-
# @recent_uploads.unshift file_set
-
# end
-
# rescue Exception => e
-
# Rails.logger.info "Something happened in check_recent_uploads: #{params[:uploads_since]} : #{e.message}"
-
# end
-
# end
-
# end
-
-
1
protected
-
-
1
# Error message shown when the two confirmation email fields differ.
# The addresses themselves are intentionally not echoed back to the user.
def emails_did_not_match_msg( _user_email_one, _user_email_two )
  'Emails did not match'
end
-
-
1
# Export this work's file sets into target_dir via ExportFilesHelper,
# yielding each (target_file_name, target_file) pair to the caller's block
# (zip_download uses this to add zip entries).
# @param do_export_predicate [Proc] filter deciding which files to export;
#   defaults to exporting everything
# @param quiet [Boolean] suppress per-file logging when true
def export_file_sets_to( target_dir:,
                         log_prefix: "",
                         do_export_predicate: ->(_target_file_name, _target_file) { true },
                         quiet: false,
                         &block )
  file_sets = curation_concern.file_sets
  Deepblue::ExportFilesHelper.export_file_sets( target_dir: target_dir,
                                                file_sets: file_sets,
                                                log_prefix: log_prefix,
                                                do_export_predicate: do_export_predicate,
                                                quiet: quiet,
                                                &block )
end
-
-
1
# Log msg and redirect back to the referrer (root as fallback), flashing msg
# as a notice.
def flash_and_go_back( msg )
  Deepblue::LoggingHelper.debug msg
  redirect_back fallback_location: root_url, notice: msg
end

# Like flash_and_go_back, but flashes msg as an alert.
def flash_error_and_go_back( msg )
  Deepblue::LoggingHelper.debug msg
  redirect_back fallback_location: root_url, alert: msg
end

# Log msg and redirect to the work's main page, flashing msg as a notice.
def flash_and_redirect_to_main_cc( msg )
  Deepblue::LoggingHelper.debug msg
  redirect_to [main_app, curation_concern], notice: msg
end

# Build the "Globus directories cleaned" message.
# @param dir [Array] cleaned directory paths (joined with a localized
#   separator) — NOTE(review): despite the singular name, callers pass an
#   array (see globus_clean_download).
def globus_clean_msg( dir )
  dirs = dir.join( MsgHelper.t( 'data_set.globus_clean_join_html' ) )
  rv = MsgHelper.t( 'data_set.globus_clean', dirs: dirs )
  return rv
end

# "File prep has started" message, with availability phrasing that depends
# on whether a notification email is known.
def globus_file_prep_started_msg( user_email: nil )
  MsgHelper.t( 'data_set.globus_file_prep_started',
               when_available: globus_files_when_available( user_email: user_email ) )
end

# "Files are being prepped" message, with availability phrasing.
def globus_files_prepping_msg( user_email: nil )
  MsgHelper.t( 'data_set.globus_files_prepping',
               when_available: globus_files_when_available( user_email: user_email ) )
end

# Availability phrase: generic wording, or email-specific wording when a
# notification address is known.
def globus_files_when_available( user_email: nil )
  if user_email.nil?
    MsgHelper.t( 'data_set.globus_files_when_available' )
  else
    MsgHelper.t( 'data_set.globus_files_when_available_email', user_email: user_email )
  end
end

# Message linking to the completed Globus download.
def globus_files_available_here
  MsgHelper.t( 'data_set.globus_files_available_here', globus_url: globus_url.to_s )
end

# Current Globus state as a user-facing message: available, prepping, or
# prep just started.
def globus_status_msg( user_email: nil )
  msg = if globus_complete?
          globus_files_available_here
        elsif globus_prepping?
          globus_files_prepping_msg( user_email: user_email )
        else
          globus_file_prep_started_msg( user_email: user_email )
        end
  msg
end

# Presenter class used for data set show views.
def show_presenter
  Hyrax::DataSetPresenter
end
-
-
1
private
-
-
1
# Parse the date_uploaded field out of a file set's solr document.
# @param file_set [#solr_document] presenter-like wrapper around a solr doc;
#   assumed to also respond to #id (file set presenters do — TODO confirm)
# @return [Time, nil] nil when the field is missing or unparseable
def get_date_uploaded_from_solr(file_set)
  field = file_set.solr_document[Solrizer.solr_name('date_uploaded', :stored_sortable, type: :date)]
  return if field.blank?
  begin
    Time.parse(field)
  rescue StandardError
    # Fixed: the old log line called field.first (field is a scalar String,
    # which has no #first) and self['id'] (a controller has no #[]), both of
    # which raised inside this rescue. Log safe values instead.
    Rails.logger.info "Unable to parse date: #{field.inspect} for #{file_set.id}"
    nil
  end
end
-
-
1
# Build the per-work path "<base_file_name><id><ext>" under dir — used by
# zip_download for both the temp folder and the zip file name.
# @param dir [Pathname]
# @return [Pathname]
def target_dir_name_id( dir, id, ext = '' )
  dir.join "#{DeepBlueDocs::Application.config.base_file_name}#{id}#{ext}"
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require 'edtf'
-
-
1
module Hyrax
-
-
1
# Base controller for Deep Blue work/collection controllers. Every feature
# flag below defaults to disabled; subclasses (e.g. DataSetsController)
# override the ones they support.
class DeepblueController < ApplicationController

  include Hyrax::BreadcrumbsForWorks

  def box_enabled?
    false
  end

  def display_provenance_log_enabled?
    false
  end

  def doi_minting_enabled?
    false
  end

  def globus_download_enabled?
    false
  end

  def tombstone_enabled?
    false
  end

  def zip_download_enabled?
    false
  end

end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
# Generated controller for Dissertation
-
1
# Generated controller for the Dissertation work type.
class DissertationsController < DeepblueController
  # Adds Hyrax behaviors to the controller.
  include Hyrax::WorksControllerBehavior

  self.curation_concern_type = ::Dissertation

  # Custom presenter for dissertation show views.
  self.show_presenter = Hyrax::DissertationPresenter
end
-
end
-
# frozen_string_literal: true
-
-
1
require File.join( Gem::Specification.find_by_name("hyrax").full_gem_path, "app/controllers/hyrax/file_sets_controller.rb" )
-
-
1
module Hyrax
-
-
# monkey patch FileSetsController
-
1
class FileSetsController < ApplicationController
-
-
1
PARAMS_KEY = 'file_set'
-
1
self.show_presenter = Hyrax::DsFileSetPresenter
-
-
1
alias_method :monkey_attempt_update, :attempt_update
-
# alias_method :monkey_update_metadata, :update_metadata
-
-
1
before_action :provenance_log_destroy, only: [:destroy]
-
1
before_action :provenance_log_update_before, only: [:update]
-
-
1
after_action :provenance_log_create, only: [:create]
-
1
after_action :provenance_log_update_after, only: [:update]
-
-
1
protect_from_forgery with: :null_session, only: [:display_provenance_log]
-
-
## Provenance log
-
-
1
# after_action for :create — write a provenance "create" entry.
def provenance_log_create
  curation_concern.provenance_create( current_user: current_user, event_note: 'FileSetsController' )
end

# before_action for :destroy — write a provenance "destroy" entry while the
# file set still exists.
def provenance_log_destroy
  curation_concern.provenance_destroy( current_user: current_user, event_note: 'FileSetsController' )
end

# after_action for :update — write the provenance entry from the snapshot
# captured by provenance_log_update_before.
def provenance_log_update_after
  curation_concern.provenance_log_update_after( current_user: current_user,
                                                update_attr_key_values: @update_attr_key_values )
end

# before_action for :update — snapshot the incoming form attributes.
def provenance_log_update_before
  @update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[PARAMS_KEY].dup )
end
-
-
## end Provenance log
-
-
## display provenance log
-
-
1
# Action: (re)load the provenance log for this file set, then fall through
# to the normal show page.
def display_provenance_log
  file_path = Deepblue::ProvenancePath.path_for_reference( curation_concern.id )
  # Fixed: the debug label previously said "DataSetsController" — a
  # copy/paste from the data set controller's version of this action.
  Deepblue::LoggingHelper.bold_debug [ "FileSetsController", "display_provenance_log", file_path ]
  Deepblue::ProvenanceLogService.entries( curation_concern.id, refresh: true )
  redirect_to [main_app, curation_concern]
end
-
-
1
# Feature flag: provenance-log display is enabled for file sets.
def display_provenance_log_enabled?
  true
end

# @return [Boolean] true when provenance log entries have been loaded
def provenance_log_entries_present?
  provenance_log_entries.present?
end
-
-
## end display provenance log
-
-
1
protected
-
-
1
# Monkey-patch override of Hyrax's attempt_update, adding debug logging to
# each branch while preserving the stock decision order: revert to a prior
# revision, replace the file content, or update metadata only.
def attempt_update
  if wants_to_revert?
    Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                         Deepblue::LoggingHelper.called_from,
                                         "current_user=#{current_user}",
                                         Deepblue::LoggingHelper.obj_class( "actor", actor ),
                                         "wants to revert" ]
    actor.revert_content(params[:revision])
  elsif params.key?(:file_set)
    if params[:file_set].key?(:files)
      Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           Deepblue::LoggingHelper.called_from,
                                           "current_user=#{current_user}",
                                           Deepblue::LoggingHelper.obj_class( "actor", actor ),
                                           "actor.update_content" ]
      actor.update_content(params[:file_set][:files].first)
    else
      Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           Deepblue::LoggingHelper.called_from,
                                           "current_user=#{current_user}",
                                           "update_metadata" ]
      update_metadata
    end
  end
end
-
-
1
# Memoized presenter built from the solr search result for params[:id]
# (rather than loading from Fedora), so the deleted-record handling in
# search_result_document applies.
def presenter
  @presenter ||= begin
    curation_concern = search_result_document( params )
    show_presenter.new( curation_concern, current_ability, request )
  end
end

# Presenter class used for file set show views.
def show_presenter
  Hyrax::DsFileSetPresenter
end
-
-
1
# Find the solr document for the requested file set, with special handling
# for deleted records:
#  * normal hit -> return the first document
#  * empty result -> raise CanCan::AccessDenied
#  * Blacklight RecordNotFound -> ask Fedora what happened:
#      - Ldp::Gone (deleted): admins are redirected to the provenance log
#        for the id; everyone else gets CanCan::AccessDenied
#      - ObjectNotFoundError (never existed): CanCan::AccessDenied
def search_result_document( search_params )
  _, document_list = search_results( search_params )
  return document_list.first unless document_list.empty?
  raise CanCan::AccessDenied
rescue Blacklight::Exceptions::RecordNotFound => e
  ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                         ::Deepblue::LoggingHelper.called_from,
                                         "e=#{e}",
                                         "" ]
  begin
    # check with Fedora to see if the requested id was deleted
    id = params[:id]
    ActiveFedora::Base.find( id )
  rescue Ldp::Gone => gone
    # it was deleted
    ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                           ::Deepblue::LoggingHelper.called_from,
                                           "gone=#{gone.class} #{gone.message} at #{gone.backtrace[0]}",
                                           "" ]
    # this looks like a deleted curation concern, so admins get redirected
    # to the provenance log controller for the deleted id
    if current_ability.admin?
      url = Rails.application.routes.url_helpers.url_for( only_path: true,
                                                          action: 'show',
                                                          controller: 'provenance_log',
                                                          id: id )
      return redirect_to( url, error: "#{id} was deleted." )
    end
  rescue ActiveFedora::ObjectNotFoundError => e2
    # nope, never existed
    ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                           ::Deepblue::LoggingHelper.called_from,
                                           "e2=#{e2.class} #{e2.message} at #{e2.backtrace[0]}",
                                           "" ]
  end
  raise CanCan::AccessDenied
end
-
-
end
-
-
end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
module Hyrax
-
# Generated controller for GenericWork
-
1
# Generated controller for the GenericWork work type.
class GenericWorksController < ApplicationController
  # Adds Hyrax behaviors to the controller.
  include Hyrax::WorksControllerBehavior
  include Hyrax::BreadcrumbsForWorks

  self.curation_concern_type = ::GenericWork

  # Custom presenter for generic work show views.
  self.show_presenter = Hyrax::GenericWorkPresenter
end
-
end
-
-
1
module Hyrax
-
-
1
module My
-
-
1
class CollectionsController < MyController
-
-
1
EVENT_NOTE = 'Hyrax::My::CollectionsController'
-
1
PARAMS_KEY = 'collection'
-
-
# Define collection specific filter facets.
-
1
# Define collection-specific filter facets on the Blacklight config.
def self.configure_facets
  configure_blacklight do |config|
    # Name of pivot facet must match field name that uses helper_method
    config.add_facet_field Collection.collection_type_gid_document_field_name,
                           helper_method: :collection_type_label, limit: 5,
                           pivot: ['has_model_ssim', Collection.collection_type_gid_document_field_name],
                           label: I18n.t('hyrax.dashboard.my.heading.collection_type')
    # This causes AdminSets to also be shown with the Collection Type label
    config.add_facet_field 'has_model_ssim',
                           label: I18n.t('hyrax.dashboard.my.heading.collection_type'),
                           limit: 5, show: false
  end
end
# Apply the facet configuration at class-load time.
configure_facets
-
-
-
1
before_action :my_collections_controller_debug_output
-
-
1
before_action :provenance_log_update_before, only: [:update]
-
1
before_action :visiblity_changed, only: [:update]
-
-
1
after_action :provenance_log_update_after, only: [:update]
-
1
after_action :visibility_changed_update, only: [:update]
-
-
1
protect_from_forgery with: :null_session, only: [:display_provenance_log]
-
-
1
# before_action debug hook; the logging body is currently commented out but
# the callback stays wired so it can be re-enabled quickly.
def my_collections_controller_debug_output
end

# The collection currently being acted on, loaded lazily from Fedora.
def curation_concern
  @collection ||= ActiveFedora::Base.find(params[:id])
end
-
-
## Provenance log
-
-
1
# after_action for :update — write the provenance entry from the snapshot
# captured earlier in the callback chain.
def provenance_log_update_after
  curation_concern.provenance_log_update_after( current_user: current_user,
                                                update_attr_key_values: @update_attr_key_values )
end

# before_action for :update — snapshot incoming form attributes, unless
# visiblity_changed (which runs in the same chain) already did so.
def provenance_log_update_before
  return unless @update_attr_key_values.nil?
  @update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[PARAMS_KEY].dup )
end

# Action: (re)load the provenance log for this collection, then return to
# the referring page (root as fallback).
def display_provenance_log
  id = @collection.id # curation_concern.id
  file_path = Deepblue::ProvenancePath.path_for_reference( id )
  Deepblue::LoggingHelper.bold_debug [ "CollectionsController", "display_provenance_log", file_path ]
  Deepblue::ProvenanceLogService.entries( id, refresh: true )
  redirect_back fallback_location: root_url
end

# Feature flag: provenance-log display is enabled for collections.
def display_provenance_log_enabled?
  true
end

# @return [Boolean] true when provenance log entries have been loaded
def provenance_log_entries_present?
  provenance_log_entries.present?
end
-
-
## end display provenance log
-
-
## visibility / publish
-
-
1
# before_action for :update — snapshot attributes for the provenance log and
# record whether this update is a visibility transition.
# NOTE(review): misspelled ("visiblity") but referenced by that symbol in
# the before_action wiring — rename both together or not at all.
def visiblity_changed
  @update_attr_key_values = curation_concern.provenance_log_update_before( form_params: params[PARAMS_KEY].dup )
  if visibility_to_private?
    mark_as_set_to_private
  elsif visibility_to_public?
    mark_as_set_to_public
  end
end

# after_action for :update — if the recorded visibility change actually took
# effect on the saved record, run the unpublish/publish workflow.
def visibility_changed_update
  if curation_concern.private? && @visibility_changed_to_private
    workflow_unpublish
  elsif curation_concern.public? && @visibility_changed_to_public
    workflow_publish
  end
end
-
-
1
def visibility_to_private?
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "" ]
-
return false if curation_concern.private?
-
params[PARAMS_KEY]['visibility'] == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
-
end
-
-
1
# True when the submitted form switches a currently non-public concern to
# public visibility.
def visibility_to_public?
  return false if curation_concern.public?
  requested = params[PARAMS_KEY]['visibility']
  requested == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
end
-
-
1
# Record that this update takes the concern private (clears the public flag).
def mark_as_set_to_private
  @visibility_changed_to_private = true
  @visibility_changed_to_public = false
end
-
-
1
# Record that this update takes the concern public (clears the private flag).
def mark_as_set_to_public
  @visibility_changed_to_private = false
  @visibility_changed_to_public = true
end
-
-
## end visibility / publish
-
-
1
# Use the dashboard "my collections" search builder for queries issued by
# this controller.
def search_builder_class
  Hyrax::My::CollectionsSearchBuilder
end
-
-
1
# Dashboard collections index: set the breadcrumb trail, warm the presenter
# and the managed-collections count, then defer to the framework index.
def index
  [ [ t(:'hyrax.controls.home'), root_path ],
    [ t(:'hyrax.dashboard.breadcrumbs.admin'), hyrax.dashboard_path ],
    [ t(:'hyrax.admin.sidebar.collections'), hyrax.my_collections_path ] ].each do |label, path|
    add_breadcrumb label, path
  end
  collection_type_list_presenter
  managed_collections_count
  super
end
-
-
1
private
-
-
1
# All search actions for this controller route to the "my collections" listing.
def search_action_url(*args)
  hyrax.my_collections_url(*args)
end
-
-
# The url of the "more" link for additional facet values
# @param args [Hash] expects :id, the facet field id
# @return [String] path to the dashboard collections facet listing
def search_facet_path(args = {})
  hyrax.my_dashboard_collections_facet_path(args[:id])
end
-
-
1
# Memoized presenter listing the collection types available to the current user.
def collection_type_list_presenter
  @collection_type_list_presenter ||= Hyrax::SelectCollectionTypeListPresenter.new(current_user)
end
-
-
1
# Cache the number of collections managed by the current user (scope: self).
# NOTE(review): the method name is plural but the ivar is singular
# (@managed_collection_count) — presumably views read the singular name;
# confirm before renaming either.
def managed_collections_count
  @managed_collection_count = Hyrax::Collections::ManagedCollectionsService.managed_collections_count(scope: self)
end
-
-
end
-
-
end
-
-
end
-
1
# Serves and administers the site robots.txt, which is stored as a
# ContentBlock record named 'robots_txt' rather than as a static file.
# #show is public; #edit/#update require authentication and authorization.
class RobotsController < ApplicationController

  before_action :authenticate_user!, except: :show
  before_action :find_robots_txt
  before_action :throw_breadcrumbs, except: :show
  layout 'hyrax/dashboard'

  # Render the stored robots.txt value as the raw response body.
  def show
    render body: @robots_txt.value
  end

  def edit
    authorize! :edit, @robots_txt
  end

  # Update the stored value; re-renders the edit form with a flash alert on
  # validation failure.
  def update
    authorize! :update, @robots_txt
    respond_to do |format|
      if @robots_txt.update(permitted_params)
        format.html { redirect_to edit_robots_path, notice: 'robots.txt updated.' }
      else
        flash.now[:alert] = "robots.txt could not be updated. #{@robots_txt.errors.full_messages}"
        format.html { render :edit }
      end
    end
  end

  private

  # Look up (or lazily create) the backing ContentBlock record.
  def find_robots_txt
    @robots_txt = ContentBlock.find_or_create_by(name: 'robots_txt')
  end

  # Breadcrumbs for the dashboard admin pages (every action except #show).
  def throw_breadcrumbs
    add_breadcrumb t(:'hyrax.controls.home'), root_path
    add_breadcrumb t(:'hyrax.dashboard.breadcrumbs.admin'), hyrax.dashboard_path
    add_breadcrumb t(:'hyrax.admin.sidebar.configuration'), '#'
    add_breadcrumb 'robots.txt', edit_robots_path
  end

  # Only the :value field of the content block may be mass-assigned.
  def permitted_params
    params.require(:content_block).permit(:value)
  end
end
-
# frozen_string_literal: true
-
-
1
module Deepbluedocs

  # Shared form configuration mixed into Deep Blue Docs work forms.
  # Declares the full term vocabulary, required fields, and the
  # primary/secondary term partitioning used to render the deposit form.
  module DefaultWorkFormBehavior
    extend ActiveSupport::Concern
    included do
      # include ScholarsArchive::DateTermsBehavior
      # include ScholarsArchive::NestedBehavior

      # accessor attributes only used to group dates and geo fields and allow proper ordering in this form
      attr_accessor :dates_section
      attr_accessor :geo_section

      # "other" free-text companions to the controlled-vocabulary fields
      attr_accessor :other_affiliation_other
      attr_accessor :degree_level_other
      attr_accessor :degree_field_other
      attr_accessor :degree_name_other

      # order isn't significant for self.terms
      self.terms += %i[
        academic_affiliation
        additional_information
        alt_title
        bibliographic_citation
        conference_location
        conference_name
        conference_section
        date_accepted
        date_available
        date_collected
        date_copyright
        date_issued
        date_modified
        date_reviewed
        date_uploaded
        date_valid
        degree_field
        degree_level
        degree_name
        description_abstract
        digitization_spec
        dspace_collection
        dspace_community
        embargo_reason
        file_extent
        file_format
        funding_body
        funding_statement
        hydrologic_unit_code
        in_series
        isbn
        issn
        license
        peerreviewed
        replaces
        resource_type
        tableofcontents
      ]

      self.required_fields += [:resource_type]
      self.required_fields -= [:keyword]

      class_attribute :default_work_primary_terms
      # TODO: is order significant self.default_work_primary_terms?
      self.default_work_primary_terms =
        %i[
          title
          alt_title
          creator
          contributor
          description_abstract
          license
          resource_type
          identifier
          dates_section
          degree_level
          degree_name
          degree_field
          bibliographic_citation
          academic_affiliation
          in_series
          subject
          tableofcontents
          rights_statement
        ]

      class_attribute :default_work_secondary_terms
      # TODO: is order significant self.default_work_secondary_terms?
      self.default_work_secondary_terms =
        %i[
          hydrologic_unit_code
          geo_section
          funding_statement
          publisher
          peerreviewed
          conference_location
          conference_name
          conference_section
          language
          file_format
          file_extent
          digitization_spec
          replaces
          additional_information
          isbn
          issn
        ]

      # Terms shown above the fold on the deposit form; admins get the
      # curation-note terms appended at the end.
      # NOTE(review): in both branches the result of
      # `default_work_primary_terms | super` is computed and then discarded —
      # presumably the union with the superclass terms was meant to be used;
      # confirm intent before changing.
      # NOTE(review): delete/<< mutate the class_attribute array in place, so
      # the reordering is shared across form instances.
      def primary_terms
        if current_ability.admin?
          default_work_primary_terms | super
          default_work_primary_terms.delete(:curation_notes_admin)
          default_work_primary_terms.delete(:curation_notes_user)
          default_work_primary_terms << :curation_notes_admin
          default_work_primary_terms << :curation_notes_user
          default_work_primary_terms
        else
          default_work_primary_terms | super
          default_work_primary_terms.delete(:curation_notes_admin)
          default_work_primary_terms.delete(:curation_notes_user)
          default_work_primary_terms
        end
      end

      # Terms shown in the collapsed "additional fields" section.
      def secondary_terms
        t = default_work_secondary_terms
        # jose admin? not found byebug
        # t << [:keyword, :source, :funding_body, :dspace_community, :dspace_collection] if current_ability.current_user.admin?
        t.flatten
      end

      # Date-ish terms folded into the permitted strong parameters.
      def self.date_terms
        %i[
          date_created
          date_available
          date_copyright
          date_issued
          date_collected
          date_valid
          date_reviewed
          date_accepted
        ]
      end

      def date_terms
        self.class.date_terms
      end

      # Strong-parameter whitelist: adds dates, degree fields, embargo reason,
      # and the nested geo / related-items attributes.
      def self.build_permitted_params
        super + date_terms + %i[degree_level degree_name degree_field] + [:embargo_reason] + [
          {
            nested_geo_attributes: %i[id
                                      _destroy
                                      point_lat
                                      point_lon
                                      bbox_lat_north
                                      bbox_lon_west
                                      bbox_lat_south
                                      bbox_lon_east
                                      label
                                      point
                                      bbox],
            nested_related_items_attributes: %i[id _destroy label related_url]
          }
        ] + [
          {
            other_affiliation_other: [],
            degree_field_other: [],
            degree_name_other: []
          }
        ]
      end
    end
  end

end
-
# frozen_string_literal: true
-
-
1
module Deepbluedocs

  # Form configuration specific to Dissertation works: term vocabulary,
  # required fields, and the primary/secondary term split for the deposit form.
  module DissertationWorkFormBehavior
    extend ActiveSupport::Concern
    included do
      # include ScholarsArchive::DateTermsBehavior
      # include ScholarsArchive::NestedBehavior

      # accessor attributes only used to group dates and geo fields and allow proper ordering in this form
      attr_accessor :dates_section
      attr_accessor :geo_section

      # "other" free-text companions to the controlled-vocabulary fields
      attr_accessor :other_affiliation_other
      attr_accessor :degree_level_other
      attr_accessor :degree_field_other
      attr_accessor :degree_name_other

      self.terms += [ :description_thesisdegreediscipline, :date_submitted, :contributor_advisor, :contributor_committeemember, :description_thesisdegreegrantor, :identifier, :identifier_orcid, :type_none, :type_snre, :subject_other, :language_none, :contributor_author, :identifier_uniqname, :description_thesisdegreename, :date_uploaded, :date_modified, :academic_affiliation, :alt_title, :description_abstract, :license, :resource_type, :date_available, :date_copyright, :date_issued, :date_collected, :date_valid, :date_reviewed, :date_accepted, :degree_level, :degree_name, :degree_field, :replaces, :hydrologic_unit_code, :funding_body, :funding_statement, :in_series, :tableofcontents, :bibliographic_citation, :peerreviewed, :additional_information, :digitization_spec, :file_extent, :file_format, :dspace_community, :dspace_collection, :isbn, :issn, :embargo_reason, :conference_location, :conference_name, :conference_section]

      self.required_fields += [:resource_type, :identifier_uniqname, :description_thesisdegreename, :description_thesisdegreediscipline, :date_issued, :date_submitted, :contributor_advisor, :contributor_committeemember, :description_abstract, :description_thesisdegreegrantor]

      # Terms shown above the fold on the deposit form.
      def primary_terms
        [:title, :identifier, :creator, :identifier_uniqname, :identifier_orcid, :description_thesisdegreename, :type_snre, :description_thesisdegreediscipline, :date_issued, :date_submitted, :contributor_advisor, :contributor_committeemember, :description_abstract, :keyword, :subject_other, :description_thesisdegreegrantor, :rights_statement]
      end

      # Terms shown in the collapsed "additional fields" section.
      def secondary_terms
        t = [:language_none, :date_available]
        # jose admin? not found byebug
        #t << [:keyword, :source, :funding_body, :dspace_community, :dspace_collection] if current_ability.current_user.admin?
        t.flatten
      end

      # Date-ish terms folded into the permitted strong parameters.
      def self.date_terms
        [
          :date_created,
          :date_available,
          :date_copyright,
          :date_issued,
          :date_collected,
          :date_valid,
          :date_reviewed,
          :date_accepted,
        ]
      end

      def date_terms
        self.class.date_terms
      end

      # Strong-parameter whitelist: adds dates, degree fields, embargo reason,
      # and the nested geo / related-items attributes.
      def self.build_permitted_params
        super + self.date_terms + [:degree_level, :degree_name, :degree_field] + [:embargo_reason] + [
          {
            :nested_geo_attributes => [:id, :_destroy, :point_lat, :point_lon, :bbox_lat_north, :bbox_lon_west, :bbox_lat_south, :bbox_lon_east, :label, :point, :bbox],
            :nested_related_items_attributes => [:id, :_destroy, :label, :related_url]
          }
        ] + [
          {
            :other_affiliation_other => [],
            :degree_field_other => [],
            :degree_name_other => []
          }
        ]
      end
    end
  end

end
-
# frozen_string_literal: true
-
-
1
module Deepbluedocs

  # Form configuration specific to GenericWork: term vocabulary, required
  # fields, and the primary/secondary term split for the deposit form.
  module GenericWorkFormBehavior
    extend ActiveSupport::Concern
    included do
      # include ScholarsArchive::DateTermsBehavior
      # include ScholarsArchive::NestedBehavior

      # accessor attributes only used to group dates and geo fields and allow proper ordering in this form
      attr_accessor :dates_section
      attr_accessor :geo_section

      # "other" free-text companions to the controlled-vocabulary fields
      attr_accessor :other_affiliation_other
      attr_accessor :degree_level_other
      attr_accessor :degree_field_other
      attr_accessor :degree_name_other

      self.terms += [ :description_sponsorship, :description_mapping, :type_none, :identifier_source, :other_affiliation, :identifier_orcid, :contributor_affiliationumcampus, :contributor_author, :relation_ispartofseries, :date_uploaded, :date_modified, :academic_affiliation, :alt_title, :description_abstract, :license, :resource_type, :date_available, :date_copyright, :date_issued, :date_collected, :date_valid, :date_reviewed, :date_accepted, :degree_level, :degree_name, :degree_field, :replaces, :hydrologic_unit_code, :funding_body, :funding_statement, :in_series, :tableofcontents, :bibliographic_citation, :peerreviewed, :additional_information, :digitization_spec, :file_extent, :file_format, :dspace_community, :dspace_collection, :isbn, :issn, :embargo_reason, :conference_location, :conference_name, :conference_section, :language_none, :subject_hlbtoplevel, :subject_hlbsecondlevel, :description_bitstreamurl, :description_provenance]

      self.required_fields += [:resource_type, :contributor_affiliationumcampus]
      self.required_fields -= [:keyword]

      # Terms shown above the fold on the deposit form.
      def primary_terms
        [:creator, :identifier_orcid, :academic_affiliation, :other_affiliation, :contributor_affiliationumcampus, :title, :alt_title, :date_issued, :identifier_source, :publisher, :peerreviewed, :bibliographic_citation, :relation_ispartofseries, :identifier, :rights_statement]
      end

      # Terms shown in the collapsed "additional fields" section.
      def secondary_terms
        t = [:type_none, :language_none, :description_mapping, :subject, :description_abstract, :description_sponsorship, :description]
        # jose admin? not found byebug
        #t << [:keyword, :source, :funding_body, :dspace_community, :dspace_collection] if current_ability.current_user.admin?
        t.flatten
      end

      # Date-ish terms folded into the permitted strong parameters
      # (includes :date_accessioned, unlike the default behavior).
      def self.date_terms
        [
          :date_created,
          :date_available,
          :date_copyright,
          :date_issued,
          :date_collected,
          :date_valid,
          :date_reviewed,
          :date_accepted,
          :date_accessioned,
        ]
      end

      def date_terms
        self.class.date_terms
      end

      # Strong-parameter whitelist: adds dates, degree/HLB/provenance fields,
      # embargo reason, and the nested geo / related-items attributes.
      def self.build_permitted_params
        super + self.date_terms + [:degree_level, :degree_name, :degree_field, :subject_hlbtoplevel, :subject_hlbsecondlevel, :description_bitstreamurl, :description_provenance] + [:embargo_reason] + [
          {
            :nested_geo_attributes => [:id, :_destroy, :point_lat, :point_lon, :bbox_lat_north, :bbox_lon_west, :bbox_lat_south, :bbox_lon_east, :label, :point, :bbox],
            :nested_related_items_attributes => [:id, :_destroy, :label, :related_url]
          }
        ] + [
          {
            :other_affiliation_other => [],
            :degree_field_other => [],
            :degree_name_other => []
          }
        ]
      end
    end
  end

end
-
# frozen_string_literal: true
-
-
1
module Hyrax

  # Deposit/edit form for DataSet works. Swaps rights_statement out for the
  # rights_license fields and defines the DataSet-specific term ordering and
  # required fields.
  class DataSetForm < DeepblueForm

    self.model_class = ::DataSet

    # rights_statement is replaced by the rights_license fields added below
    self.terms -= %i[ rights_statement ]
    self.terms +=
      %i[
        authoremail
        date_coverage
        description
        resource_type
        publisher
        fundedby
        fundedby_other
        doi
        description_abstract
        keyword
        methodology
        referenced_by
        rights_license
        rights_license_other
        license_other
        curation_notes_admin
        curation_notes_user
        geo_location_place
        geo_location_box
      ]

    self.default_work_primary_terms =
      %i[
        title
        creator
        authoremail
        methodology
        resource_type
        description_abstract
        description
        publisher
        date_coverage
        rights_license
        rights_license_other
        license_other
        fundedby
        fundedby_other
        keyword
        language
        referenced_by
        curation_notes_admin
        curation_notes_user
        geo_location_place
        geo_location_box
      ]

    self.default_work_secondary_terms = []

    self.required_fields =
      %i[
        title
        creator
        authoremail
        methodology
        description
        rights_license
        resource_type
        description_abstract
        publisher
      ]

    # @return [Boolean] always true; overrides the DeepblueForm predicate
    def data_set?
      true
    end

    # Merge date-coverage values into the form's attribute hash in place.
    # @param hsh [Hash, nil] keys are stringified before merging; nil is a no-op
    def merge_date_coverage_attributes!(hsh)
      @attributes.merge!(hsh&.stringify_keys || {})
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Hyrax

  # Common base form for Deep Blue Docs work types; mixes in the shared
  # default form behavior and defines type predicates for subclasses to
  # override.
  class DeepblueForm < Hyrax::Forms::WorkForm

    include Deepbluedocs::DefaultWorkFormBehavior

    # @return [Boolean] overridden to return true by DataSetForm
    def data_set?
      false
    end

    # @return [Boolean] false here; presumably overridden by dissertation
    #   forms — confirm against subclasses
    def dissertation?
      false
    end

    # @return [Boolean] false here; presumably overridden by generic-work
    #   forms — confirm against subclasses
    def generic_work?
      false
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Hyrax
  # Generated form for Dissertation
  class DissertationForm < DeepblueForm
    include Deepbluedocs::DissertationWorkFormBehavior

    self.model_class = ::Dissertation
    # resource_type is a required field (see DissertationWorkFormBehavior),
    # so make sure it is present in the term list
    self.terms += [:resource_type]
  end
end
-
# frozen_string_literal: true
-
-
1
module Hyrax

  module Forms

    # Form object backing collection create/edit screens. Wraps a Collection
    # model, exposes the editable term vocabulary, and provides helpers for
    # branding, membership listings, and nestable parent collections.
    class CollectionForm
      include HydraEditor::Form
      include HydraEditor::Form::Permissions
      include ::Hyrax::BrandingHelper
      # Used by the search builder
      attr_reader :scope

      delegate :id, :depositor, :permissions, :human_readable_type, :member_ids, :nestable?, to: :model

      class_attribute :membership_service_class, :default_work_primary_terms, :default_work_secondary_terms

      # Required for search builder (FIXME)
      alias collection model

      self.model_class = ::Collection

      self.membership_service_class = Collections::CollectionMemberService

      delegate :blacklight_config, to: Hyrax::CollectionsController

      self.terms = %i[
        authoremail
        based_near
        collection_type_gid
        contributor
        creator
        date_coverage
        date_created
        description
        fundedby
        grantnumber
        identifier
        keyword
        language
        license
        methodology
        publisher
        referenced_by
        related_url
        representative_id
        resource_type
        rights_license
        subject
        subject_discipline
        thumbnail_id
        title
        visibility
      ]

      self.default_work_primary_terms =
        %i[
          title
          creator
          description
          keyword
          subject_discipline
          language
          referenced_by
        ]

      self.default_work_secondary_terms = []

      self.required_fields = %i[
        title
        creator
        description
        subject_discipline
      ]

      # Minimal ability/repository wrapper handed to the membership service.
      ProxyScope = Struct.new(:current_ability, :repository, :blacklight_config) do
        def can?(*args)
          current_ability.can?(*args)
        end
      end

      # @param model [Collection] the collection model that backs this form
      # @param current_ability [Ability] the capabilities of the current user
      # @param repository [Blacklight::Solr::Repository] the solr repository
      def initialize(model, current_ability, repository)
        super(model)
        @scope = ProxyScope.new(current_ability, repository, blacklight_config)
      end

      # Memoized form for the collection's permission template, creating the
      # underlying PermissionTemplate record on first access if needed.
      def permission_template
        @permission_template ||= begin
          template_model = PermissionTemplate.find_or_create_by(source_id: model.id)
          PermissionTemplateForm.new(template_model)
        end
      end

      # @return [Hash] All FileSets in the collection, file.to_s is the key, file.id is the value
      def select_files
        Hash[all_files_with_access]
      end

      # Terms that appear above the accordion
      def primary_terms
        default_work_primary_terms
      end

      # Terms that appear within the accordion
      def secondary_terms
        default_work_secondary_terms
      end

      # @return [String] the configured relative URL root, or '' when unset
      def relative_url_root
        rv = ::DeepBlueDocs::Application.config.relative_url_root
        return rv if rv
        ''
      end

      def banner_info
        @banner_info ||= branding_banner_info( id: id )
      end

      def logo_info
        @logo_info ||= branding_logo_info( id: id )
      end

      # Do not display additional fields if there are no secondary terms
      # @return [Boolean] display additional fields on the form?
      def display_additional_fields?
        secondary_terms.any?
      end

      # @return [String, nil] first title of the thumbnail, or nil when none
      def thumbnail_title
        return unless model.thumbnail
        model.thumbnail.title.first
      end

      def list_parent_collections
        collection.member_of_collections
      end

      def list_child_collections
        collection_member_service.available_member_subcollections.documents
      end

      # Collections that may become parents of this one, serialized for the UI.
      # Bug fix: the memoized early return previously returned the raw Array
      # while the first call returned a JSON String; both paths now return the
      # JSON serialization.
      # @return [String] JSON array of { "id", "title_first" } hashes
      def available_parent_collections(scope:)
        return @available_parents.to_json if @available_parents.present?

        collection = Collection.find(id)
        colls = Hyrax::Collections::NestedCollectionQueryService.available_parent_collections(child: collection, scope: scope, limit_to_id: nil)
        @available_parents = colls.map do |col|
          { "id" => col.id, "title_first" => col.title.first }
        end
        @available_parents.to_json
      end

      private

      # [display string, id] pairs for every file set the user can access.
      def all_files_with_access
        member_presenters(member_work_ids).flat_map(&:file_set_presenters).map { |x| [x.to_s, x.id] }
      end

      # Override this method if you have a different way of getting the member's ids
      def member_work_ids
        response = collection_member_service.available_member_work_ids.response
        response.fetch('docs').map { |doc| doc['id'] }
      end

      def collection_member_service
        @collection_member_service ||= membership_service_class.new(scope: scope, collection: collection, params: blacklight_config.default_solr_params)
      end

      def member_presenters(member_ids)
        PresenterFactory.build_for(ids: member_ids,
                                   presenter_class: WorkShowPresenter,
                                   presenter_args: [nil])
      end

    end

  end

end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
module Hyrax
  # Generated form for GenericWork
  class GenericWorkForm < Hyrax::Forms::WorkForm
    include Deepbluedocs::GenericWorkFormBehavior

    self.model_class = ::GenericWork
    # resource_type is a required field (see GenericWorkFormBehavior),
    # so make sure it is present in the term list
    self.terms += [:resource_type]
  end
end
-
1
# Application-wide view helper namespace; currently empty.
module ApplicationHelper
end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  # Helpers for building URLs used in notification emails, sending mail
  # through DeepblueMailer, and writing structured entries to the email log.
  module EmailHelper
    extend ActionView::Helpers::TranslationHelper

    # @return [String] the configured contact email address
    def self.contact_email
      Settings.hyrax.contact_email
    end

    # Full URL for a curation concern, dispatched on its class.
    # @return [String] empty string for unrecognized types
    def self.curation_concern_url( curation_concern: )
      if curation_concern.is_a?( DataSet )
        data_set_url( id: curation_concern.id )
      elsif curation_concern.is_a?( FileSet )
        file_set_url( id: curation_concern.id )
      elsif curation_concern.is_a?( Collection )
        collection_url( id: curation_concern.id )
      else
        ""
      end
    end

    # @param id [String, nil] used when collection is not supplied
    # @param collection [Collection, nil] takes precedence over id when present
    def self.collection_url( id: nil, collection: nil )
      id = collection.id if collection.present?
      host = hostname
      Rails.application.routes.url_helpers.hyrax_collection_url( id: id, host: host, only_path: false )
    end

    # @param id [String, nil] used when data_set is not supplied
    # @param data_set [DataSet, nil] takes precedence over id when present
    def self.data_set_url( id: nil, data_set: nil )
      id = data_set.id if data_set.present?
      host = hostname
      Rails.application.routes.url_helpers.hyrax_data_set_url( id: id, host: host, only_path: false )
    end

    # @param id [String, nil] used when file_set is not supplied
    # @param file_set [FileSet, nil] takes precedence over id when present
    def self.file_set_url( id: nil, file_set: nil )
      id = file_set.id if file_set.present?
      host = hostname
      Rails.application.routes.url_helpers.hyrax_file_set_url( id: id, host: host, only_path: false )
    end

    # Whether email log entries are also echoed to the Rails logger.
    def self.echo_to_rails_logger
      DeepBlueDocs::Application.config.email_log_echo_to_rails_logger
    end

    # Configured hostname, falling back to a localhost URL in development.
    # NOTE(review): the fallback includes a scheme and trailing slash while
    # the configured Settings.hostname may not — confirm both forms are
    # accepted as the :host option of the url helpers above.
    def self.hostname
      rv = Settings.hostname
      return rv unless rv.nil?
      # then we are in development mode
      "http://localhost:3000/#{Settings.relative_url_root}/"
    end

    # Write a structured entry to the email log (EMAIL_LOGGER), folding the
    # to/from/subject/message fields and the email_enabled flag into the
    # logged key/values. to:, from:, and subject: are required keywords.
    def self.log( class_name: 'UnknownClass',
                  event: 'unknown',
                  event_note: '',
                  id: 'unknown_id',
                  timestamp: LoggingHelper.timestamp_now,
                  to:,
                  to_note: '',
                  from:,
                  subject:,
                  message: '',
                  **key_values )

      email_enabled = DeepBlueDocs::Application.config.email_enabled
      # omit to_note from the logged fields when it is blank
      added_key_values = if to_note.blank?
                           { to: to, from: from, subject: subject, message: message, email_enabled: email_enabled }
                         else
                           { to: to, to_note: to_note, from: from, subject: subject, message: message, email_enabled: email_enabled }
                         end
      key_values.merge! added_key_values
      LoggingHelper.log( class_name: class_name,
                         event: event,
                         event_note: event_note,
                         id: id,
                         timestamp: timestamp,
                         echo_to_rails_logger: EmailHelper.echo_to_rails_logger,
                         logger: EMAIL_LOGGER,
                         **key_values )
    end

    # Append a raw line to the email log.
    def self.log_raw( msg )
      EMAIL_LOGGER.info( msg )
    end

    # @return [String] the configured notification email address
    def self.notification_email
      Rails.configuration.notification_email
    end

    # Send an email via DeepblueMailer. No-op when `to` is blank or emailing
    # is disabled in config; delivers synchronously (deliver_now).
    def self.send_email( to:, from:, subject:, body:, log: false )
      email_enabled = DeepBlueDocs::Application.config.email_enabled
      is_enabled = email_enabled ? "is enabled" : "is not enabled"
      LoggingHelper.bold_debug [ "EmailHelper.send_email #{is_enabled}", "to: #{to} from: #{from} subject: #{subject}\nbody:\n#{body}" ] if log
      return if to.blank?
      return unless email_enabled
      email = DeepblueMailer.send_an_email( to: to, from: from, subject: subject, body: body )
      email.deliver_now
    end

    # @return [String] the configured user email address
    def self.user_email
      Rails.configuration.user_email
    end

    # Email address of the signed-in user.
    # @return [String, nil] nil when not signed in or no current user
    def self.user_email_from( current_user, user_signed_in: true )
      return nil unless user_signed_in
      user_email = nil
      unless current_user.nil?
        # LoggingHelper.debug "current_user=#{current_user}"
        # LoggingHelper.debug "current_user.name=#{current_user.name}"
        # LoggingHelper.debug "current_user.email=#{current_user.email}"
        user_email = current_user.email
      end
      user_email
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  # Callbacks wired into Hyrax lifecycle events. Each logs the event via
  # LoggingCallback; create callbacks additionally record provenance on
  # objects that support it.
  module EventHelper

    def self.after_batch_create_failure_callback( user: )
      LoggingCallback.process_event_user( event_name: :after_batch_create_failure, user: user )
    end

    def self.after_batch_create_success_callback( user: )
      LoggingCallback.process_event_user( event_name: :after_batch_create_success, user: user )
    end

    # Log creation and record provenance when the concern supports it.
    def self.after_create_concern_callback( curation_concern:, user: )
      LoggingCallback.process_event_curation_concern( event_name: :after_create_concern,
                                                      curation_concern: curation_concern,
                                                      user: user )
      return unless curation_concern.respond_to? :provenance_create
      curation_concern.provenance_create( current_user: user, event_note: 'after_create_concern' )
    end

    # Log file set creation and record provenance when supported.
    def self.after_create_fileset_callback( file_set:, user: )
      LoggingCallback.process_event_file_set( event_name: :after_create_fileset, file_set: file_set, user: user )
      return unless file_set.respond_to? :provenance_create
      file_set.provenance_create( current_user: user, event_note: 'after_create_fileset' )
    end

    def self.after_destroy_callback( id:, user: )
      LoggingCallback.process_event_user( event_name: :after_destroy, user: user, msg: "id: #{id}" )
    end

    def self.after_fixity_check_failure_callback( file_set:, checksum_audit_log: )
      LoggingCallback.process_event( event_name: :after_fixity_check_failure,
                                     msg: "file_set: #{file_set} checksum_audit_log: #{checksum_audit_log}" )
    end

    def self.after_import_url_failure_callback( file_set:, user: )
      LoggingCallback.process_event_file_set( event_name: :after_import_url_failure, file_set: file_set, user: user )
    end

    def self.after_revert_content_callback( file_set:, user: )
      LoggingCallback.process_event_file_set( event_name: :after_revert_content, file_set: file_set, user: user )
    end

    # :after_update_content callback replaced by after_perform block in IngestJob
    def self.after_update_content
      # TODO
    end

    def self.after_update_metadata_callback( curation_concern:, user: )
      LoggingCallback.process_event_curation_concern( event_name: :after_update_metadata,
                                                      curation_concern: curation_concern,
                                                      user: user )
      # return unless curation_concern.respond_to? :provenance_log_update_after
      # curation_concern.provenance_log_update_after( current_user: user, event_note: 'after_update_metadata' )
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  # Helpers for copying FileSet content out of the repository to local files.
  module ExportFilesHelper

    require 'down'

    # Download source_uri into target_file.
    # http(s) URIs go through the Down gem, falling back to Kernel#open on
    # any error; other URIs are copied with Kernel#open + IO.copy_stream.
    # NOTE(review): the Kernel#open fallback on an http(s) URI relies on
    # open-uri being loaded — confirm. Kernel#open on externally supplied
    # strings is also a known command-injection vector ("| cmd"); confirm
    # source_uri is always repository-generated.
    # @return [Integer] number of bytes exported
    def self.export_file_uri( source_uri:, target_file: )
      if source_uri.starts_with?( "http:" ) || source_uri.starts_with?( "https:" )
        begin
          # see: https://github.com/janko-m/down
          Down.download( source_uri, destination: target_file )
          bytes_exported = File.size target_file
        rescue Exception => e # rubocop:disable Lint/RescueException
          Rails.logger.error "ExportFilesHelper.export_file_uri(#{source_uri},#{target_file}) #{e.class}: #{e.message} at #{e.backtrace[0]}"
          bytes_exported = open( source_uri ) { |io| IO.copy_stream( io, target_file ) }
        end
      else
        bytes_exported = open( source_uri ) { |io| IO.copy_stream( io, target_file ) }
      end
      return bytes_exported
    end

    # Expected size of source_uri, read from the content-length header.
    # @return [Integer, String] -1 when the header is never read
    # NOTE(review): io.meta is an open-uri feature and content-length comes
    # back as a String — confirm callers coerce before arithmetic.
    def self.export_file_uri_bytes( source_uri: )
      # TODO: replace this with Down gem
      bytes_expected = -1
      open( source_uri ) { |io| bytes_expected = io.meta['content-length'] }
      return bytes_expected
    end

    # Export the content of the given file sets into target_dir, skipping
    # archive files and de-duplicating colliding file names with a numeric
    # suffix (_001, _002, ...). Calls do_export_predicate before each copy and
    # on_export_block after each successful one.
    # @return [Integer] total bytes exported
    def self.export_file_sets( target_dir:,
                               file_sets:,
                               log_prefix: "export_file_sets",
                               do_export_predicate: ->(_target_file_name, _target_file) { true },
                               quiet: false,
                               &on_export_block )

      LoggingHelper.debug "#{log_prefix} Starting export to #{target_dir}" unless quiet
      files_extracted = {}
      total_bytes = 0
      file_sets.each do |file_set|
        file = file_set.files_to_file
        if file.nil?
          Rails.logger.warn "#{log_prefix} file_set.id #{file_set.id} files[0] is nil"
        elsif file_set.archive_file?
          # exclude from .zip
        else
          target_file_name = file_set.label
          # fix possible issues with target file name
          target_file_name = '_nil_' if target_file_name.nil?
          target_file_name = '_empty_' if target_file_name.empty?
          # de-duplicate: append _001, _002, ... until the name is unused
          if files_extracted.key? target_file_name
            dup_count = 1
            base_ext = File.extname target_file_name
            base_target_file_name = File.basename target_file_name, base_ext
            target_file_name = base_target_file_name + "_" + dup_count.to_s.rjust( 3, '0' ) + base_ext
            while files_extracted.key? target_file_name
              dup_count += 1
              target_file_name = base_target_file_name + "_" + dup_count.to_s.rjust( 3, '0' ) + base_ext
            end
          end
          files_extracted.store( target_file_name, true )
          target_file = target_dir.join target_file_name
          if do_export_predicate.call( target_file_name, target_file )
            source_uri = file.uri.value
            # LoggingHelper.debug "#{log_prefix} #{source_uri} exists? #{File.exist?( source_uri )}" unless quiet
            LoggingHelper.debug "#{log_prefix} export #{target_file} << #{source_uri}" unless quiet
            bytes_copied = export_file_uri( source_uri: source_uri, target_file: target_file )
            total_bytes += bytes_copied
            copied = DeepblueHelper.human_readable_size( bytes_copied )
            LoggingHelper.debug "#{log_prefix} copied #{copied} to #{target_file}" unless quiet
            on_export_block.call( target_file_name, target_file ) if on_export_block # rubocop:disable Style/SafeNavigation
          else
            LoggingHelper.debug "#{log_prefix} skipped export of #{target_file}" unless quiet
          end
        end
      end
      total_copied = DeepblueHelper.human_readable_size( total_bytes )
      LoggingHelper.debug "#{log_prefix} Finished export to #{target_dir}; total #{total_copied} in #{files_extracted.size} files" unless quiet
      total_bytes
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
module IngestHelper
-
-
1
INGEST_HELPER_VERBOSE = true
-
-
# @param [FileSet] file_set
-
# @param [String] repository_file_id identifier for a Hydra::PCDM::File
-
# @param [String, NilClass] file_path the cached file within the Hyrax.config.working_path
-
1
# Run characterization on the file behind repository_file_id for file_set,
# persist the characterization proxy and refresh indexes, then (always, via
# ensure) update the total file size and chain into derivative creation —
# even when characterization itself failed.
#
# @param [FileSet] file_set
# @param [String] repository_file_id identifier for a Hydra::PCDM::File
# @param [String, NilClass] file_path the cached file within the Hyrax.config.working_path
# @raise [LoadError] when the file set has no characterization proxy
def self.characterize( file_set,
                       repository_file_id,
                       file_path = nil,
                       continue_job_chain: true,
                       continue_job_chain_later: true,
                       current_user: IngestHelper.current_user,
                       delete_input_file: true,
                       uploaded_file_ids: [],
                       **added_prov_key_values )

  Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                       Deepblue::LoggingHelper.called_from,
                                       "file_set=#{file_set}",
                                       "repository_file_id=#{repository_file_id}",
                                       "file_path=#{file_path}",
                                       "continue_job_chain=#{continue_job_chain}",
                                       "continue_job_chain_later=#{continue_job_chain_later}",
                                       "current_user=#{current_user}",
                                       "delete_input_file=#{delete_input_file}",
                                       "uploaded_file_ids=#{uploaded_file_ids}",
                                       "added_prov_key_values=#{added_prov_key_values}",
                                       # "wrapper.methods=#{wrapper.methods.sort}",
                                       "" ] if INGEST_HELPER_VERBOSE
  # See Hyrax gem: app/job/characterize_job.rb
  file_name = Hyrax::WorkingDirectory.find_or_retrieve( repository_file_id, file_set.id, file_path )
  # file_ext = File.extname file_set.label
  # if DeepBlueDocs::Application.config.characterize_excluded_ext_set.key? file_ext
  #   Rails.logger.info "Skipping characterization of file with extension #{file_ext}: #{file_name}"
  #   file_set.provenance_characterize( current_user: current_user,
  #                                     event_note: "skipped_extension(#{file_ext})",
  #                                     calling_class: name,
  #                                     **added_prov_key_values )
  #   perform_create_derivatives_job( file_set,
  #                                   repository_file_id,
  #                                   file_name,
  #                                   file_path,
  #                                   delete_input_file: delete_input_file,
  #                                   continue_job_chain: continue_job_chain,
  #                                   continue_job_chain_later: continue_job_chain_later,
  #                                   current_user: current_user,
  #                                   **added_prov_key_values )
  #   return
  # end
  unless file_set.characterization_proxy?
    error_msg = "#{file_set.class.characterization_proxy} was not found"
    Rails.logger.error error_msg
    raise LoadError, error_msg
  end
  begin
    proxy = file_set.characterization_proxy
    Hydra::Works::CharacterizationService.run( proxy, file_name )
    Rails.logger.debug "Ran characterization on #{proxy.id} (#{proxy.mime_type})"
    file_set.provenance_characterize( current_user: current_user,
                                      calling_class: name,
                                      **added_prov_key_values )
    file_set.characterization_proxy.save!
    file_set.update_index
    file_set.parent&.in_collections&.each(&:update_index)
    # file_set.parent.in_collections.each( &:update_index ) if file_set.parent
  rescue Exception => e # rubocop:disable Lint/RescueException
    # Bug fix: this message previously said "IngestHelper.create_derivatives",
    # misattributing characterization failures to the derivatives step.
    Rails.logger.error "IngestHelper.characterize(#{file_name}) #{e.class}: #{e.message} at #{e.backtrace[0]}"
  ensure
    # Always continue the ingest chain, even when characterization failed.
    update_total_file_size( file_set, log_prefix: "CharacterizationHelper.characterize()" )
    perform_create_derivatives_job( file_set,
                                    repository_file_id,
                                    file_name,
                                    file_path,
                                    continue_job_chain: continue_job_chain,
                                    continue_job_chain_later: continue_job_chain_later,
                                    current_user: current_user,
                                    delete_input_file: delete_input_file,
                                    uploaded_file_ids: uploaded_file_ids,
                                    **added_prov_key_values )
  end
end
-
-
    # Run Hyrax derivative generation for a file_set's repository file, with
    # skip rules (excluded extensions, video without ffmpeg, oversize files),
    # provenance logging, and reindexing. Always deletes the cached input file
    # in the ensure block when delete_input_file is set — derivative creation
    # is the last step of the ingest job chain.
    #
    # @param [FileSet] file_set
    # @param [String] repository_file_id identifier for a Hydra::PCDM::File
    # @param [String, NilClass] file_path the cached file within the Hyrax.config.working_path
    # @param [User, String] current_user recorded on provenance entries
    # @param [Boolean] delete_input_file remove the cached file when done
    # @param [Array] uploaded_file_ids used for logging only in this method
    def self.create_derivatives( file_set,
                                 repository_file_id,
                                 file_path = nil,
                                 current_user: IngestHelper.current_user,
                                 delete_input_file: true,
                                 uploaded_file_ids: [],
                                 **added_prov_key_values )

      Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           Deepblue::LoggingHelper.called_from,
                                           "file_set=#{file_set}",
                                           "repository_file_id=#{repository_file_id}",
                                           "file_path=#{file_path}",
                                           "current_user=#{current_user}",
                                           "delete_input_file=#{delete_input_file}",
                                           "uploaded_file_ids=#{uploaded_file_ids}",
                                           "added_prov_key_values=#{added_prov_key_values}",
                                           "" ] if INGEST_HELPER_VERBOSE
      # See Hyrax gem: app/job/create_derivatives_job.rb
      file_name = Hyrax::WorkingDirectory.find_or_retrieve( repository_file_id, file_set.id, file_path )
      Rails.logger.warn "Create derivatives for: #{file_name}."
      begin
        # Skip rule 1: file extension is on the configured exclusion list.
        file_ext = File.extname file_set.label
        if DeepBlueDocs::Application.config.derivative_excluded_ext_set.key? file_ext
          Rails.logger.info "Skipping derivative of file with extension #{file_ext}: #{file_name}"
          file_set.provenance_create_derivative( current_user: current_user,
                                                 event_note: "skipped_extension #{file_ext}",
                                                 calling_class: name,
                                                 **added_prov_key_values )
          return
        end
        # Skip rule 2: video derivatives need ffmpeg.
        # NOTE(review): the event_note here reuses "skipped_extension" — looks
        # copy/pasted; a video-specific note was probably intended. Confirm.
        if file_set.video? && !Hyrax.config.enable_ffmpeg
          Rails.logger.info "Skipping video derivative job for file: #{file_name}"
          file_set.provenance_create_derivative( current_user: current_user,
                                                 event_note: "skipped_extension #{file_ext}",
                                                 calling_class: name,
                                                 **added_prov_key_values )
          return
        end
        # Skip rule 3: file is larger than the configured threshold
        # (threshold of -1 means "no limit").
        threshold_file_size = DeepBlueDocs::Application.config.derivative_max_file_size
        if threshold_file_size > -1 && File.exist?(file_name) && File.size(file_name) > threshold_file_size
          human_readable = ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( threshold_file_size, precision: 3 )
          Rails.logger.info "Skipping file larger than #{human_readable} for create derivative job file: #{file_name}"
          file_set.provenance_create_derivative( current_user: current_user,
                                                 event_note: "skipped_file_size #{File.size(file_name)}",
                                                 calling_class: name,
                                                 **added_prov_key_values )
          return
        end
        Rails.logger.debug "About to call create derivatives: #{file_name}."
        file_set.create_derivatives( file_name )
        Rails.logger.debug "Create derivatives successful: #{file_name}."
        file_set.provenance_create_derivative( current_user: current_user,
                                               calling_class: name,
                                               **added_prov_key_values )
        # Reload from Fedora and reindex for thumbnail and extracted text
        file_set.reload
        file_set.update_index
        file_set.parent.update_index if parent_needs_reindex?(file_set)
        Rails.logger.debug "Successful create derivative job for file: #{file_name}"
      rescue Exception => e # rubocop:disable Lint/RescueException
        # Deliberate catch-all: a derivative failure must not break the ingest
        # chain; the error is logged and cleanup still runs in ensure.
        Rails.logger.error "IngestHelper.create_derivatives(#{file_set},#{repository_file_id},#{file_path}) #{e.class}: #{e.message} at #{e.backtrace[0]}"
      ensure
        # This is the last step in the process ( ingest job -> characterization job -> create derivative (last step))
        # So now it's safe to remove the file uploaded file.
        delete_file( file_path, delete_file_flag: delete_input_file, msg_prefix: 'Create derivatives ' )
      end
    end
-
-
1
    # System-level user recorded on provenance entries when no human user is
    # driving the operation; delegates to ProvenanceHelper.
    def self.current_user
      ProvenanceHelper.system_as_current_user
    end
-
-
1
def self.delete_file( file_path, delete_file_flag: false, msg_prefix: '' )
-
return unless delete_file_flag
-
return unless File.exist? file_path
-
File.delete file_path
-
Rails.logger.debug "#{msg_prefix}file deleted: #{file_path}"
-
end
-
-
    # Entry point for ingesting a locally cached file into a FileSet.
    #
    # @param [FileSet] file_set
    # @param [String] path the cached file within the Hyrax.config.working_path
    # @param [User] _user unused here; kept for call-site compatibility
    # @param [Array] uploaded_file_ids
    # @param [Hash] opts only :bypass_fedora is read by this implementation —
    #   NOTE(review): the documented :mime_type/:filename/:relation options are
    #   never consulted; confirm whether any caller relies on them.
    # NOTE(review): File.open(path) is passed on without an ensure/close —
    #   the handle is left to the GC; confirm this is acceptable.
    def self.ingest( file_set, path, _user, uploaded_file_ids = [], opts = {} )
      Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           Deepblue::LoggingHelper.called_from,
                                           "file_set=#{file_set}",
                                           "path=#{path}",
                                           "uploaded_file_ids=#{uploaded_file_ids}",
                                           "user=#{_user}",
                                           "opts=#{opts}",
                                           "" ] if INGEST_HELPER_VERBOSE
      # launched from Hyrax gem: app/actors/hyrax/actors/file_set_actor.rb FileSetActor#create_content
      # See Hyrax gem: app/job/ingest_local_file_job.rb
      # Default the label to the file's basename before handing off.
      file_set.label ||= File.basename(path)
      file_set_actor_create_content( file_set, File.open(path), uploaded_file_ids: uploaded_file_ids, bypass_fedora: opts[:bypass_fedora])
    end
-
-
1
    # Inlined version of Hyrax::Actors::FileSetActor#create_content: labels and
    # saves the file_set, attaches the file (either as an external/bypass URL
    # or via a JobIoWrapper), mints a version, virus-scans, and runs
    # characterization inline.
    #
    # NOTE(review): `user` is referenced below but is neither a parameter nor a
    # visible method of this class — likely a latent NameError (or a helper
    # defined elsewhere in the file). Confirm before relying on either branch.
    #
    # @param file_set [FileSet]
    # @param file [File, Hyrax::UploadedFile] content to attach
    # @param relation [Symbol] file relation to populate (default :original_file)
    # @param uploaded_file_ids [Array<String>] accepted but unused here
    # @param bypass_fedora [String, false] when truthy, treated as an external
    #   file URL and attached without storing the bytes in Fedora
    # @return [false, void] false when an intermediate save fails
    def self.file_set_actor_create_content( file_set, file, relation = :original_file, uploaded_file_ids: [], bypass_fedora: false )
      # If the file set doesn't have a title or label assigned, set a default.
      file_set.label ||= label_for( file )
      file_set.title = [file_set.label] if file_set.title.blank?
      return false unless file_set.save # Need to save to get an id
      if bypass_fedora
        # bypass_fedora carries the external URL of the content.
        Hydra::Works::AddExternalFileToFileSet.call( file_set,
                                                     bypass_fedora,
                                                     relation,
                                                     versioning: false )
        return false unless file_set.save
        repository_file = related_file( file_set, relation )
        Hyrax::VersioningService.create( repository_file, user )
      else
        io = JobIoWrapper.create_with_varied_file_handling!( user: user, file: file, relation: relation, file_set: file_set )
        # FileActor#ingest_file(io) equivalent.
        # Skip versioning because versions will be minted by VersionCommitter
        # as necessary during save_characterize_and_record_committer.
        Hydra::Works::AddFileToFileSet.call( file_set,
                                             io,
                                             relation,
                                             versioning: false )
        return false unless file_set.save
        repository_file = related_file( file_set, relation )
        Hyrax::VersioningService.create( repository_file, user )
        virus_scan( file_set )
        # Characterize inline rather than enqueueing CharacterizeJob.
        characterize( file_set, repository_file.id, io.path )
      end
    end
-
-
# For the label, use the original_filename or original_name if it's there.
-
# If the file was imported via URL, parse the original filename.
-
# If all else fails, use the basename of the file where it sits.
-
# @note This is only useful for labeling the file_set, because of the recourse to import_url
-
1
def self.label_for( file )
-
if file.is_a?(Hyrax::UploadedFile) # filename not present for uncached remote file!
-
file.uploader.filename.present? ? file.uploader.filename : File.basename(Addressable::URI.parse(file.file_url).path)
-
elsif file.respond_to?(:original_name) # e.g. Hydra::Derivatives::IoDecorator
-
file.original_name
-
elsif file_set.import_url.present?
-
# This path is taken when file is a Tempfile (e.g. from ImportUrlJob)
-
File.basename(Addressable::URI.parse(file_set.import_url).path)
-
else
-
File.basename(file)
-
end
-
end
-
-
1
def self.related_file( file_set, relation )
-
file_set.public_send(relation) || raise("No #{relation} returned for FileSet #{file_set.id}")
-
end
-
-
# If this file_set is the thumbnail for the parent work,
-
# then the parent also needs to be reindexed.
-
1
def self.parent_needs_reindex?(file_set)
-
return false unless file_set.parent
-
file_set.parent.thumbnail_id == file_set.id
-
end
-
-
1
def self.perform_create_derivatives_job( file_set,
-
repository_file_id,
-
file_name,
-
file_path,
-
continue_job_chain: true,
-
continue_job_chain_later: true,
-
current_user: IngestHelper.current_user,
-
delete_input_file: true,
-
uploaded_file_ids: [],
-
**added_prov_key_values )
-
-
Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
Deepblue::LoggingHelper.called_from,
-
"file_set=#{file_set}",
-
"repository_file_id=#{repository_file_id}",
-
"file_name=#{file_name}",
-
"file_path=#{file_path}",
-
"continue_job_chain=#{continue_job_chain}",
-
"continue_job_chain_later=#{continue_job_chain_later}",
-
"current_user=#{current_user}",
-
"delete_input_file=#{delete_input_file}",
-
"uploaded_file_ids=#{uploaded_file_ids}",
-
"added_prov_key_values=#{added_prov_key_values}",
-
# "wrapper.methods=#{wrapper.methods.sort}",
-
"" ] if INGEST_HELPER_VERBOSE
-
if continue_job_chain
-
if continue_job_chain_later
-
# TODO: see about adding **added_prov_key_values to this:
-
CreateDerivativesJob.perform_later( file_set,
-
repository_file_id,
-
file_name,
-
current_user: current_user,
-
delete_input_file: delete_input_file,
-
uploaded_file_ids: uploaded_file_ids )
-
else
-
# CreateDerivativesJob.perform_now( file_set,
-
# repository_file_id,
-
# file_name,
-
# current_user: current_user,
-
# delete_input_file: delete_input_file,
-
# uploaded_file_ids: uploaded_file_ids )
-
create_derivatives( file_set,
-
repository_file_id,
-
file_name,
-
delete_input_file: delete_input_file,
-
current_user: current_user,
-
uploaded_file_ids: uploaded_file_ids,
-
**added_prov_key_values )
-
end
-
else
-
delete_file( file_path, delete_file_flag: delete_input_file, msg_prefix: 'Characterize ' )
-
end
-
end
-
-
1
def self.update_total_file_size( file_set, log_prefix: nil )
-
Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
Deepblue::LoggingHelper.called_from,
-
"file_set=#{file_set}",
-
"log_prefix=#{log_prefix}",
-
# "wrapper.methods=#{wrapper.methods.sort}",
-
"" ] if INGEST_HELPER_VERBOSE
-
# Rails.logger.info "begin IngestHelper.update_total_file_size"
-
# Rails.logger.debug "#{log_prefix} file_set.orginal_file.size=#{file_set.original_file.size}" unless log_prefix.nil?
-
# Rails.logger.info "nothing to update, parent is nil" if file_set.parent.nil?
-
return if file_set.parent.nil?
-
total = file_set.parent.total_file_size
-
if total.nil? || total.zero?
-
Rails.logger.debug "#{log_prefix}.file_set.parent.update_total_file_size!" unless log_prefix.nil?
-
file_set.parent.update_total_file_size!
-
else
-
Rails.logger.debug "#{log_prefix}.file_set.parent.total_file_size_add_file_set!" unless log_prefix.nil?
-
file_set.parent.total_file_size_add_file_set! file_set
-
end
-
Rails.logger.info "end IngestHelper.update_total_file_size"
-
rescue Exception => e # rubocop:disable Lint/RescueException
-
Rails.logger.error "IngestHelper.update_total_file_size(#{file_set}) #{e.class}: #{e.message} at #{e.backtrace[0]}"
-
end
-
-
1
def self.virus_scan( file_set )
-
LoggingHelper.bold_debug "IngestHelper.virus_scan #{file_set}"
-
file_set.virus_scan
-
rescue Exception => e # rubocop:disable Lint/RescueException
-
Rails.logger.error "IngestHelper.virus_scan(#{file_set}) #{e.class}: #{e.message} at #{e.backtrace[0]}"
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require 'jira-ruby'
-
-
1
module JiraHelper
-
1
extend ActionView::Helpers::TranslationHelper
-
-
1
FIELD_NAME_CONTACT_INFO = "customfield_11315".freeze
-
1
FIELD_NAME_CREATOR = "customfield_11304".freeze
-
1
FIELD_NAME_DEPOSIT_ID = "customfield_11303".freeze
-
1
FIELD_NAME_DEPOSIT_URL = "customfield_11305".freeze
-
1
FIELD_NAME_DESCRIPTION = "description".freeze
-
1
FIELD_NAME_DISCIPLINE = "customfield_11309".freeze
-
1
FIELD_NAME_STATUS = "customfield_12000".freeze
-
1
FIELD_NAME_SUMMARY = "summary".freeze
-
-
    # Maps a Deep Blue Data discipline string to the Jira custom-field option
    # payload ({"value"=>, "id"=>}) accepted by the FIELD_NAME_DISCIPLINE field.
    # Option ids correspond to .../jira/rest/api/2/customFieldOption/<id>.
    FIELD_VALUES_DISCIPLINE_MAP = {
      "Arts" =>
        [{ "value" => "Arts", "id" => "11303" }],
      "Business" =>
        [{ "value" => "Business", "id" => "10820" }],
      "Engineering" =>
        [{ "value" => "Engineering", "id" => "10821" }],
      "General Information Sources" =>
        [{ "value" => "General Information Sources", "id" => "11304" }],
      "Government, Politics, and Law" =>
        [{ "value" => "Government, Politics, and Law", "id" => "11305" }],
      "Health Sciences" =>
        [{ "value" => "Health Sciences", "id" => "10822" }],
      "Humanities" =>
        [{ "value" => "Humanities", "id" => "11306" }],
      "International Studies" =>
        [{ "value" => "International Studies", "id" => "11307" }],
      "News and Current Events" =>
        [{ "value" => "News and Current Events", "id" => "11308" }],
      "Science" =>
        [{ "value" => "Science", "id" => "10824" }],
      "Social Sciences" =>
        [{ "value" => "Social Sciences", "id" => "10825" }],
      "Other" =>
        [{ "value" => "Other", "id" => "10823" }]
    }.freeze
-
-
1
    # Whether Jira integration is turned on for this deployment.
    def self.jira_enabled
      DeepBlueDocs::Application.config.jira_integration_enabled
    end
-
-
1
    # Configured Jira issue type used for new deposit tickets.
    def self.jira_manager_issue_type
      DeepBlueDocs::Application.config.jira_manager_issue_type
    end
-
-
1
    # Configured Jira project key under which deposit tickets are created.
    def self.jira_manager_project_key
      DeepBlueDocs::Application.config.jira_manager_project_key
    end
-
-
1
    # When true, new_ticket returns a fake URL instead of contacting Jira.
    def self.jira_test_mode
      DeepBlueDocs::Application.config.jira_test_mode
    end
-
-
1
def self.summary_last_name( curation_concern: )
-
name = Array( curation_concern.creator ).first
-
return "" if name.blank?
-
match = name.match( /^([^,]+),.*$/ ) # first non-comma substring
-
return match[1] if match
-
match = name.match( /^.* ([^ ]+)$/ ) # last non-space substring
-
return match[1] if match
-
return name
-
end
-
-
1
def self.summary_description( curation_concern: )
-
description = Array( curation_concern.description ).first
-
return "" if description.blank?
-
match = description.match( /^([^ ]+) +([^ ]+) [^ ].*$/ ) # three plus words
-
return "#{match[1]}#{match[2]}" if match
-
match = description.match( /^([^ ]+) +([^ ]+)$/ ) # two words
-
return "#{match[1]}#{match[2]}" if match
-
match = description.match( /^[^ ]+$/ ) # one word
-
return description if match
-
return description
-
end
-
-
1
def self.summary_title( curation_concern: )
-
title = Array( curation_concern.title ).first
-
return "" if title.blank?
-
match = title.match( /^([^ ]+) +([^ ]+) [^ ].*$/ ) # three plus words
-
return "#{match[1]}#{match[2]}" if match
-
match = title.match( /^([^ ]+) +([^ ]+)$/ ) # two words
-
return "#{match[1]}#{match[2]}" if match
-
match = title.match( /^[^ ]+$/ ) # one word
-
return title if match
-
return title
-
end
-
-
1
    # Create a Jira "Data deposit" ticket for a newly deposited work, then
    # record the resulting ticket URL in the work's admin curation notes.
    #
    # Jira field mapping (see FIELD_NAME_* constants):
    #   summary     - "<Depositor last name>_<First two title words>_<deposit id>"
    #                 e.g. Nasser_BootAcceleration_n583xv03w
    #   requester   - creator plus contact info (authoremail)
    #   identifier  - deposit id; URL - deposit URL in Deep Blue Data
    #   description - title lines plus "by <creator>"
    #   discipline  - first subject_discipline value
    #
    # @param curation_concern [DataSet] the deposited work
    def self.jira_ticket_for_create( curation_concern: )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             "curation_concern.id=#{curation_concern.id}",
                                             "" ]
      summary_title = summary_title( curation_concern: curation_concern )
      summary_last_name = summary_last_name( curation_concern: curation_concern )
      summary = "#{summary_last_name}_#{summary_title}_#{curation_concern.id}"

      contact_info = curation_concern.authoremail
      creator = Array( curation_concern.creator ).first
      deposit_id = curation_concern.id
      deposit_url = ::Deepblue::EmailHelper.data_set_url( data_set: curation_concern )
      discipline = Array( curation_concern.subject_discipline ).first
      description = Array( curation_concern.title ).join("\n") + "\n\nby #{creator}"

      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "summary=#{summary}",
                                             "description=#{description}",
                                             "" ]
      jira_url = JiraHelper.new_ticket( contact_info: contact_info,
                                        deposit_id: deposit_id,
                                        deposit_url: deposit_url,
                                        description: description,
                                        discipline: discipline,
                                        summary: summary )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             "jira_url=#{jira_url}",
                                             "" ]

      # No ticket created (Jira disabled or failed) — nothing to record.
      return if jira_url.nil?
      return unless curation_concern.respond_to? :curation_notes_admin
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             "curation_concern.curation_notes_admin=#{curation_concern.curation_notes_admin}",
                                             "" ]
      curation_concern.date_modified = DateTime.now # touch it so it will save updated attributes
      notes = curation_concern.curation_notes_admin
      notes = [] if notes.nil?
      curation_concern.curation_notes_admin = notes << "Jira ticket: #{jira_url}"
      curation_concern.save!
    end
-
-
1
    # Create a Jira issue describing a deposit and return its browse URL.
    #
    # Returns nil when Jira integration is disabled, and a predictable fake
    # URL without contacting Jira when jira_test_mode is on. The issue is
    # first built with its core fields, then each custom field is saved with
    # its own issue.save call so one rejected field does not block the rest;
    # each failed save is logged and otherwise ignored.
    #
    # NOTE(review): save_options is assembled and logged but never passed to
    # issue.save — confirm it is intentionally diagnostic only.
    #
    # @return [String, nil] ticket URL, or nil when Jira is disabled
    def self.new_ticket( project_key: jira_manager_project_key,
                         issue_type: jira_manager_issue_type,
                         contact_info:,
                         deposit_id:,
                         deposit_url:,
                         description:,
                         discipline:,
                         summary: )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "summary=#{summary}",
                                             "project_key=#{project_key}",
                                             "issue_type=#{issue_type}",
                                             "description=#{description}",
                                             "jira_enabled=#{jira_enabled}",
                                             "" ]
      return nil unless jira_enabled
      client_options = {
        :username => Settings.jira.username,
        :password => Settings.jira.password,
        :site => Settings.jira.site_url,
        :context_path => '/jira',
        :auth_type => :basic
      }
      save_options = {
        "fields" => {
          "project" => { "key" => project_key },
          "issuetype" => { "name" => issue_type },
          FIELD_NAME_CONTACT_INFO => contact_info,
          FIELD_NAME_DEPOSIT_ID => deposit_id,
          FIELD_NAME_DEPOSIT_URL => deposit_url,
          FIELD_NAME_DESCRIPTION => description,
          FIELD_NAME_DISCIPLINE => discipline,
          FIELD_NAME_SUMMARY => summary }
      }
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             "client_options=#{client_options}",
                                             "save_options=#{save_options}",
                                             "" ]

      # Test mode: skip Jira entirely and return a predictable URL.
      return "https://test.jira.url/#{project_key}" if jira_test_mode

      client = JIRA::Client.new( client_options )
      # Build the issue with the required core fields first...
      build_options = {
        "fields" => {
          FIELD_NAME_SUMMARY => summary,
          "project" => { "key" => project_key },
          "issuetype" => { "name" => issue_type },
          FIELD_NAME_DESCRIPTION => description }
      }
      issue = client.Issue.build
      rv = issue.save( build_options )
      ::Deepblue::LoggingHelper.bold_debug( [ Deepblue::LoggingHelper.here,
                                              Deepblue::LoggingHelper.called_from,
                                              "issue.save( #{build_options} ) rv=#{rv}",
                                              "" ] ) unless rv
      # ...then add each custom field with an individual save.
      sopts = { "fields" => { FIELD_NAME_CONTACT_INFO => contact_info } }
      rv = issue.save( sopts )
      ::Deepblue::LoggingHelper.bold_debug( [ Deepblue::LoggingHelper.here,
                                              Deepblue::LoggingHelper.called_from,
                                              "issue.save( #{sopts} ) rv=#{rv}",
                                              "" ] ) unless rv
      sopts = { "fields" => { FIELD_NAME_DEPOSIT_ID => deposit_id } }
      rv = issue.save( sopts )
      ::Deepblue::LoggingHelper.bold_debug( [ Deepblue::LoggingHelper.here,
                                              Deepblue::LoggingHelper.called_from,
                                              "issue.save( #{sopts} ) rv=#{rv}",
                                              "" ] ) unless rv
      sopts = { "fields" => { FIELD_NAME_DEPOSIT_URL => deposit_url } }
      rv = issue.save( sopts )
      ::Deepblue::LoggingHelper.bold_debug( [ Deepblue::LoggingHelper.here,
                                              Deepblue::LoggingHelper.called_from,
                                              "issue.save( #{sopts} ) rv=#{rv}",
                                              "" ] ) unless rv
      # Discipline must be sent as the Jira option payload, not the raw string.
      sopts = { "fields" => { FIELD_NAME_DISCIPLINE => FIELD_VALUES_DISCIPLINE_MAP[discipline] } }
      rv = issue.save( sopts )
      ::Deepblue::LoggingHelper.bold_debug( [ Deepblue::LoggingHelper.here,
                                              Deepblue::LoggingHelper.called_from,
                                              "issue.save( #{sopts} ) rv=#{rv}",
                                              "" ] ) unless rv
      # if rv is false, the save failed.
      url = ticket_url( client: client, issue: issue )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             "url=#{url}",
                                             "" ]
      return url
    end
-
-
1
def self.ticket_url( client:, issue: )
-
issue_key = issue.key if issue.respond_to? :key
-
"#{client.options[:site]}#{client.options[:context_path]}/browse/#{issue.key}"
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
  # Raised by JsonLoggerHelper#parse_log_line when a line does not match the
  # expected structured-log format.
  class LogParseError < RuntimeError
  end
-
-
1
module JsonLoggerHelper
-
-
1
TIMESTAMP_FORMAT = '\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d'.freeze
-
1
RE_TIMESTAMP_FORMAT = Regexp.compile "^#{TIMESTAMP_FORMAT}$".freeze
-
# Format: Date Timestamp Event/Event_detail_possibly_empty/ClassName/ID_possibly_empty Rest_in_form_of_JSON_hash
-
1
RE_LOG_LINE = Regexp.compile "^(#{TIMESTAMP_FORMAT}) ([^/]+)/([^/]*)/([^/]+)/([^/ ]*) (.*)$".freeze
-
1
PREFIX_UPDATE_ATTRIBUTE = 'UpdateAttribute_'.freeze
-
-
1
module ClassMethods
-
-
1
def extract_embargo_form_values( curation_concern:, update_key_prefix:, form_params: )
-
::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
-
::Deepblue::LoggingHelper.called_from,
-
::Deepblue::LoggingHelper.obj_class( "curation_concern", curation_concern ),
-
"curation_concern.id=#{curation_concern.id}",
-
"update_key_prefix=#{update_key_prefix}",
-
"form_params=#{form_params}",
-
"" ]
-
embargo_values = {}
-
key = "embargo_release_date"
-
new_value = form_params[key]
-
old_value = curation_concern.embargo_release_date if curation_concern.respond_to? :embargo_release_date
-
update_key = "#{update_key_prefix}#{key}".to_sym
-
embargo_values[update_key] = form_update_attribute( key: :embargo_release_date,
-
old_value: old_value,
-
new_value: new_value )
-
-
key = "visibility_during_embargo"
-
new_value = form_params[key]
-
old_value = curation_concern.visibility_during_embargo if curation_concern.respond_to? :visibility_during_embargo
-
update_key = "#{update_key_prefix}#{key}".to_sym
-
embargo_values[update_key] = form_update_attribute( key: :visibility_during_embargo,
-
old_value: old_value,
-
new_value: new_value )
-
-
key = "visibility_after_embargo"
-
new_value = form_params[key]
-
old_value = curation_concern.visibility_after_embargo if curation_concern.respond_to? :visibility_after_embargo
-
update_key = "#{update_key_prefix}#{key}".to_sym
-
embargo_values[update_key] = form_update_attribute( key: :visibility_after_embargo,
-
old_value: old_value,
-
new_value: new_value )
-
-
embargo_values
-
end
-
-
1
      # Build an update-attribute record { attribute:, old_value:, new_value: }.
      # The old value is round-tripped through JSON encode/decode so it is
      # normalized to plain JSON-safe data (e.g. repository value objects
      # become strings/arrays) before being compared or logged.
      def form_update_attribute( key:, old_value:, new_value: )
        old_value = ActiveSupport::JSON.encode old_value
        old_value = ActiveSupport::JSON.decode old_value
        attr = { attribute: key, old_value: old_value, new_value: new_value }
        attr
      end
-
-
1
      # Diff submitted form params against the curation concern's current
      # values, producing update-attribute records keyed
      # "#{update_key_prefix}#{param}". Only params that map to an existing
      # stored attribute (plus the computed :visibility) are considered; with
      # delta_only (default) unchanged values are dropped. When visibility is
      # being changed to "embargo", the embargo sub-fields are captured under
      # the :embargo key.
      #
      # @param form_params [Hash, nil] raw form params (string keys)
      # @param delta_only [Boolean] record only values that actually changed
      # @return [Hash{Symbol=>Hash}]
      def form_params_to_update_attribute_key_values( curation_concern:,
                                                      form_params:,
                                                      update_key_prefix: PREFIX_UPDATE_ATTRIBUTE,
                                                      delta_only: true )

        attr_key_values = {}
        return attr_key_values if form_params.nil?
        embargo_values = nil
        form_params.each_pair do |key, value|
          update_key = "#{update_key_prefix}#{key}".to_sym
          key = key.to_sym
          # Determine whether this param corresponds to a current value; as a
          # side effect, capture that current value in old_value.
          has_old_value = case key
                          when :visibility
                            # visibility is computed, not a stored attribute
                            old_value = curation_concern.visibility
                            embargo_values = extract_embargo_form_values( curation_concern: curation_concern,
                                                                          update_key_prefix: update_key_prefix,
                                                                          form_params: form_params ) if value == "embargo"
                            true
                          else
                            if curation_concern.has_attribute? key
                              old_value = curation_concern[key]
                              true
                            else
                              false
                            end
                          end
          next unless has_old_value
          # Normalize multi-valued form input: empty / [''] mean "no value";
          # multi-selects append a trailing '' entry that must be stripped.
          if value.is_a? Array
            if value.blank?
              value = nil
            elsif [''] == value
              value = nil
            elsif 1 < value.size
              value.pop if '' == value.last
            end
          end
          new_value = nil
          if delta_only
            # Skip when both sides are blank, or when the value is unchanged.
            unless old_value.blank? && value.blank?
              new_value = value unless old_value == value
            end
          else
            new_value = value
          end
          next if new_value.nil?
          attr_key_values[update_key] = form_update_attribute( key: key, old_value: old_value, new_value: value )
        end
        attr_key_values[:embargo] = embargo_values if embargo_values.present?
        attr_key_values
      end
-
-
1
def logger_initialize_key_values( user_email:, event_note:, **added_key_values )
-
9
key_values = { user_email: user_email }
-
9
key_values.merge!( event_note: event_note ) if event_note.present?
-
9
key_values.merge!( added_key_values ) if added_key_values.present?
-
9
key_values
-
end
-
-
1
      # JSON-encode a value for a log line; pass it through untouched when
      # json_encode is false. When whole-value encoding fails, falls back to
      # to_s for scalars, or encodes hash-likes pair-by-pair (recursively) so
      # one unencodable member does not lose the rest of the payload.
      def logger_json_encode( value:, json_encode: true )
        return value unless json_encode
        begin
          return ActiveSupport::JSON.encode value
        rescue Exception => e # rubocop:disable Lint/RescueException
          # Deliberate catch-all: logging must never raise into the caller.
          Rails.logger.error "#{e.class}: #{e.message} at #{e.backtrace[0]}"
          return value.to_s unless value.respond_to? :each_pair
          new_value = {}
          value.each_pair do |key, val|
            new_value[key] = logger_json_encode( value: val )
          end
          return ActiveSupport::JSON.encode new_value
        end
      end
-
-
1
def msg_to_log( class_name:,
-
event:,
-
event_note:,
-
id:,
-
timestamp:,
-
time_zone:,
-
json_encode: true,
-
**log_key_values )
-
9
if event_note.blank?
-
4
key_values = { event: event, timestamp: timestamp, time_zone: time_zone, class_name: class_name, id: id }
-
4
event += '/'
-
else
-
5
key_values = { event: event,
-
event_note: event_note,
-
timestamp: timestamp,
-
time_zone: time_zone,
-
class_name: class_name,
-
id: id }
-
5
event = "#{event}/#{event_note}"
-
end
-
9
key_values.merge! log_key_values
-
9
key_values = logger_json_encode(value: key_values, json_encode: json_encode )
-
9
"#{timestamp} #{event}/#{class_name}/#{id} #{key_values}"
-
end
-
-
1
def parse_log_line( line, line_number: nil, raw_key_values: false )
-
# line is of the form: "timestamp event/event_note/class_name/id key_values"
-
10
match = RE_LOG_LINE.match line
-
10
unless match
-
3
msg = "parse of log line failed: '#{line}'" if line_number.blank?
-
3
msg = "parse of log line failed at line #{line_number}: '#{line}'" if line_number.present?
-
3
raise LogParseError, msg
-
end
-
7
timestamp = match[1]
-
7
event = match[2]
-
7
event_note = match[3]
-
7
class_name = match[4]
-
7
id = match[5]
-
7
key_values = match[6]
-
7
key_values = parse_log_line_key_values key_values unless raw_key_values
-
7
return timestamp, event, event_note, class_name, id, key_values
-
end
-
-
1
      # Decode the JSON key/values payload of a parsed log line into a Hash.
      def parse_log_line_key_values( key_values )
        ActiveSupport::JSON.decode key_values
      end
-
-
1
      # Name recorded as the acting user for system-initiated events.
      def system_as_current_user
        "Deepblue"
      end
-
-
1
      # Current wall-clock time formatted "YYYY-MM-DD HH:MM:SS".
      # NOTE: Time#to_formatted_s(:db) is provided by ActiveSupport.
      def timestamp_now
        Time.now.to_formatted_s(:db )
      end
-
-
1
      # Configured time-zone name recorded alongside log timestamps.
      def timestamp_zone
        DeepBlueDocs::Application.config.timezone_zone
      end
-
-
1
def to_log_format_timestamp( timestamp )
-
21
is_a_string = timestamp.is_a?( String )
-
21
return timestamp if is_a_string && RE_TIMESTAMP_FORMAT =~ timestamp
-
20
timestamp = Time.parse( timestamp ) if is_a_string
-
20
timestamp = timestamp.to_formatted_s( :db ) if timestamp.is_a? Time
-
20
timestamp.to_s
-
end
-
-
1
      # Re-check previously captured update-attribute records against the
      # concern's values as they stand now (e.g. after save), keeping only the
      # entries whose value actually changed from the recorded old_value.
      # Accepts either the records directly as **kwargs or wrapped under an
      # :update_attr_key_values key.
      #
      # @return [Hash, nil] surviving records, or nil when nothing was passed
      def update_attribute_key_values( curation_concern:,
                                       update_key_prefix: PREFIX_UPDATE_ATTRIBUTE,
                                       **update_attr_key_values )

        return nil if update_attr_key_values.blank?
        new_update_attr_key_values = {}
        key_values = update_attr_key_values
        # Unwrap when the caller passed the records nested under one key.
        key_values = key_values[:update_attr_key_values] if key_values.key?( :update_attr_key_values )
        key_values.each_pair do |key, value|
          # Only keys carrying the update prefix are attribute-update records.
          next unless key.to_s.start_with? update_key_prefix
          attribute = value[:attribute]
          old_value = value[:old_value]
          # Read the attribute's current (post-save) value.
          new_value = curation_concern_attribute( curation_concern: curation_concern, attribute: attribute )
          new_update_attr_key_values[key] = { attribute: attribute,
                                              old_value: old_value,
                                              new_value: new_value } unless old_value == new_value
        end
        return new_update_attr_key_values
      end
-
-
1
def curation_concern_attribute( curation_concern:, attribute: )
-
2
case attribute
-
when :visibility
-
curation_concern.visibility
-
else
-
2
curation_concern[attribute]
-
end
-
end
-
-
end
-
-
1
extend ClassMethods
-
-
1
    # When this module is included (rather than extended), also expose the
    # ClassMethods helpers as class-level methods on the including class.
    def self.included( base )
      base.extend( ClassMethods )
    end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
module LoggingHelper
-
-
1
    # Debug-log msg framed by ">>>>>>>>>>" marker lines so it stands out.
    # When msg is enumerable each element is logged on its own line, and hash
    # elements are expanded to one "key: value" line each when key_value_lines
    # is true; otherwise msg (and any block) is passed straight to the logger.
    # @param lines [Integer] marker lines before and after (minimum 1)
    def self.bold_debug( msg = nil, label: nil, key_value_lines: true, lines: 1, &block )
      lines = 1 unless lines.positive?
      lines.times { Rails.logger.debug ">>>>>>>>>>" }
      Rails.logger.debug label if label.present?
      if msg.respond_to?( :each )
        msg.each do |m|
          if key_value_lines && m.respond_to?( :each_pair )
            m.each_pair { |k, v| Rails.logger.debug "#{k}: #{v}" }
          else
            Rails.logger.debug m
          end
        end
        Rails.logger.debug nil, &block if block_given?
      else
        Rails.logger.debug msg, &block
      end
      lines.times { Rails.logger.debug ">>>>>>>>>>" }
    end
-
-
1
    # "called from: <file:line>" describing the caller of the method that
    # invoked this helper (two frames up from here).
    def self.called_from
      "called from: #{caller_locations(1, 2)[1]}"
    end
-
-
1
    # String form of the call site two frames up.
    # NOTE(review): this shadows Kernel#caller with different semantics (one
    # location string instead of a backtrace array) — confirm call sites
    # expect that.
    def self.caller
      "#{caller_locations(1, 2)[1]}"
    end
-
-
1
    # Same logging behavior as bold_debug, but with no marker framing by
    # default (lines defaults to 0 and negative values are clamped to 0).
    def self.debug( msg = nil, label: nil, key_value_lines: true, lines: 0, &block )
      lines = 0 if lines.negative?
      lines.times { Rails.logger.debug ">>>>>>>>>>" }
      Rails.logger.debug label if label.present?
      if msg.respond_to?( :each )
        msg.each do |m|
          if key_value_lines && m.respond_to?( :each_pair )
            m.each_pair { |k, v| Rails.logger.debug "#{k}: #{v}" }
          else
            Rails.logger.debug m
          end
        end
        Rails.logger.debug nil, &block if block_given?
      else
        Rails.logger.debug msg, &block
      end
      lines.times { Rails.logger.debug ">>>>>>>>>>" }
    end
-
-
1
def self.here
-
15
"#{caller_locations(1, 1)[0]}"
-
end
-
-
1
def self.initialize_key_values( user_email:, event_note:, **added_key_values )
-
5
key_values = { user_email: user_email }
-
5
key_values.merge!( event_note: event_note ) if event_note.present?
-
5
key_values.merge!( added_key_values ) if added_key_values.present?
-
5
key_values
-
end
-
-
1
def self.log( class_name: 'UnknownClass',
-
event: 'unknown',
-
event_note: '',
-
id: 'unknown_id',
-
timestamp: LoggingHelper.timestamp_now,
-
time_zone: LoggingHelper.timestamp_zone,
-
echo_to_rails_logger: true,
-
logger: Rails.logger,
-
**key_values )
-
-
msg = msg_to_log( class_name: class_name,
-
event: event,
-
event_note: event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone,
-
**key_values )
-
logger.info msg
-
Rails.logger.info msg if echo_to_rails_logger
-
end
-
-
1
def self.msg_to_log( class_name:,
-
event:,
-
event_note:,
-
id:, timestamp:,
-
time_zone:,
-
json_encode: true,
-
**added_key_values )
-
2
if event_note.blank?
-
1
key_values = { event: event, timestamp: timestamp, time_zone: time_zone, class_name: class_name, id: id }
-
1
event += '/'
-
else
-
1
key_values = { event: event,
-
event_note: event_note,
-
timestamp: timestamp,
-
time_zone: time_zone,
-
class_name: class_name,
-
id: id }
-
1
event = "#{event}/#{event_note}"
-
end
-
2
key_values.merge! added_key_values
-
2
key_values = ActiveSupport::JSON.encode key_values if json_encode
-
2
"#{timestamp} #{event}/#{class_name}/#{id} #{key_values}"
-
end
-
-
1
def self.obj_attribute_names( label, obj )
-
return "#{label}.attribute_names=N/A" unless obj.respond_to? :attribute_names
-
"#{label}.attribute_names=#{obj.attribute_names}"
-
end
-
-
1
def self.obj_class( label, obj )
-
9
"#{label}.class=#{obj.class.name}"
-
end
-
-
1
def self.obj_instance_variables( label, obj )
-
"#{label}.instance_variables=#{obj.instance_variables}"
-
end
-
-
1
def self.obj_methods( label, obj )
-
"#{label}.methods=#{obj.methods.sort}"
-
end
-
-
1
def self.obj_to_json( label, obj )
-
return "#{label}.to_json=N/A" unless obj.respond_to? :to_json
-
"#{label}.to_json=#{obj.to_json}"
-
end
-
-
1
def self.system_as_current_user
-
1
"Deepblue"
-
end
-
-
1
def self.timestamp_now
-
Time.now.to_formatted_s(:db )
-
end
-
-
1
def self.timestamp_zone
-
DeepBlueDocs::Application.config.timezone_zone
-
end
-
-
1
def self.to_log_format_timestamp( timestamp )
-
timestamp = Time.parse( timestamp ) if timestamp.is_a? String
-
timestamp = timestamp.to_formatted_s( :db ) if timestamp.is_a? Time
-
timestamp.to_s
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
module MetadataHelper
-
-
1
SOURCE_DBDv1 = 'DBDv1' # rubocop:disable Style/ConstantName
-
1
SOURCE_DBDv2 = 'DBDv2' # rubocop:disable Style/ConstantName
-
1
DEFAULT_BASE_DIR = "/deepbluedata-prep/"
-
1
DEFAULT_SOURCE = SOURCE_DBDv2
-
1
DEFAULT_TASK = 'populate'
-
1
FIELD_SEP = '; '
-
1
HEADER_TYPE_COLLECTIONS = ':collections:'
-
1
HEADER_TYPE_USERS = ':users:'
-
1
HEADER_TYPE_WORKS = ':works:'
-
1
MODE_APPEND = 'append'
-
1
MODE_BUILD = 'build'
-
1
MODE_MIGRATE = 'migrate'
-
1
PREFIX_COLLECTION = 'c_'
-
1
PREFIX_USERS = 'users'
-
1
PREFIX_WORK = 'w_'
-
-
1
ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC = %w[ admin_set_id
-
authoremail
-
creator
-
creator_ordered
-
curation_notes_admin
-
curation_notes_admin_ordered
-
curation_notes_user
-
curation_notes_user_ordered
-
date_coverage
-
date_created
-
date_modified
-
date_published
-
date_uploaded
-
depositor
-
description
-
description_ordered
-
doi
-
fundedby
-
fundedby_other
-
grantnumber
-
isReferencedBy
-
isReferencedBy_ordered
-
keyword
-
keyword_ordered
-
language
-
language_ordered
-
methodology
-
owner
-
prior_identifier
-
referenced_by
-
referenced_by_ordered
-
rights_license_other
-
source
-
subject_discipline
-
title
-
title_ordered
-
tombstone
-
access_deepblue
-
access_deepblue_ordered
-
total_file_size ].freeze
-
1
ATTRIBUTE_NAMES_ALWAYS_INCLUDE_FILE_SET = %w[ creator
-
curation_notes_admin
-
curation_notes_admin_ordered
-
curation_notes_user
-
curation_notes_user_ordered
-
date_created
-
date_modified
-
date_uploaded
-
depositor
-
label
-
owner
-
prior_identifier
-
title
-
virus_scan_service
-
virus_scan_status
-
virus_scan_status_date ].freeze
-
1
ATTRIBUTE_NAMES_ALWAYS_INCLUDE_USER = %w[ id email ].freeze
-
1
ATTRIBUTE_NAMES_IGNORE = %w[ access_control_id
-
collection_type_gid
-
file_size
-
head
-
part_of tail
-
thumbnail_id ].freeze
-
1
ATTRIBUTE_NAMES_IGNORE_IMPORT = %w[ creator_ordered
-
curation_notes_admin_ordered
-
curation_notes_user_ordered
-
description_ordered
-
isReferencedBy_ordered
-
language_ordered
-
referenced_by_ordered
-
representative_id
-
resource_type
-
title_ordered
-
total_file_size ].freeze
-
1
ATTRIBUTE_NAMES_IGNORE_IMPORT_FILE_SET = %w[ description
-
file_size
-
file_size_human_readable
-
keyword
-
language
-
representative_id
-
resource_type
-
title ].freeze
-
1
ATTRIBUTE_NAMES_MAP_V1_V2 = { 'isReferencedBy': 'referenced_by',
-
'rights': 'rights_license',
-
'subject': 'subject_discipline' }.freeze
-
1
ATTRIBUTE_NAMES_MAP_V2_V1 = {}.freeze
-
1
ATTRIBUTE_NAMES_USER_IGNORE = %w[ current_sign_in_at
-
current_sign_in_ip
-
reset_password_token
-
reset_password_sent_at ].freeze
-
# encrypted_password
-
-
1
def self.attribute_names_always_include_cc
-
@@attribute_names_always_include ||= init_attribute_names_always_include_cc
-
end
-
-
1
def self.attribute_names_collection
-
@@attribute_names_collection ||= Collection.attribute_names.sort
-
end
-
-
1
def self.attribute_names_file_set
-
@@attribute_names_file_set ||= FileSet.attribute_names.sort
-
end
-
-
1
def self.attribute_names_user
-
@@attribute_names_user ||= User.attribute_names.sort
-
end
-
-
1
def self.attribute_names_work( source: )
-
if source == SOURCE_DBDv2
-
DataSet.attribute_names.sort
-
else
-
GenericWork.attribute_names.sort
-
end
-
end
-
-
1
def self.init_attribute_names_always_include_cc
-
rv = {}
-
ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC.each { |name| rv[name] = true }
-
return rv
-
end
-
-
1
def self.file_from_file_set( file_set )
-
file = nil
-
files = file_set.files
-
unless files.nil? || files.size.zero?
-
file = files[0]
-
files.each do |f|
-
file = f unless f.original_name.empty?
-
end
-
end
-
return file
-
end
-
-
1
def self.human_readable_size( value )
-
value = value.to_i
-
return ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value, precision: 3 )
-
end
-
-
1
def self.log_lines( filename, *lines )
-
File.open( filename, "a" ) do |f|
-
lines.each { |line| f.puts line }
-
end
-
end
-
-
1
def self.log_provenance_migrate( curation_concern:, parent: nil, migrate_direction: 'export', source: )
-
if source == SOURCE_DBDv1
-
msg = "Migrate #{migrate_direction} #{curation_concern.class.name} #{curation_concern.id}"
-
msg += " parent_id: #{parent.id}" if parent.present?
-
PROV_LOGGER.info( msg )
-
else
-
return unless curation_concern.respond_to? :provenance_migrate
-
parent_id = nil
-
parent_id = parent.id if parent.present?
-
curation_concern.provenance_migrate( current_user: nil,
-
parent_id: parent_id,
-
migrate_direction: migrate_direction )
-
end
-
end
-
-
1
def self.metadata_filename_collection( pathname_dir, collection )
-
pathname_dir.join "w_#{collection.id}_metadata_report.txt"
-
end
-
-
1
def self.metadata_filename_collection_work( pathname_dir, collection, work )
-
pathname_dir.join "c_#{collection.id}_w_#{work.id}_metadata_report.txt"
-
end
-
-
1
def self.metadata_filename_work( pathname_dir, work )
-
pathname_dir.join "w_#{work.id}_metadata_report.txt"
-
end
-
-
1
def self.metadata_multi_valued?( attribute_value )
-
return false if attribute_value.blank?
-
return true if attribute_value.respond_to?( :each ) && 1 < attribute_value.size
-
false
-
end
-
-
1
def self.ordered( ordered_values: nil, values: nil )
-
3137
return nil if values.nil?
-
3137
if DeepBlueDocs::Application.config.do_ordered_list_hack
-
3137
unless ordered_values.nil?
-
begin
-
467
values = OrderedStringHelper.deserialize( ordered_values )
-
rescue OrderedStringHelper::DeserializeError
-
# fallback to original values, which are stored in an unspecified order
-
return values
-
end
-
end
-
end
-
3137
return values
-
end
-
-
1
def self.ordered_values( ordered_values: nil, values: nil )
-
88
return nil if values.nil?
-
88
rv = nil
-
88
if DeepBlueDocs::Application.config.do_ordered_list_hack
-
88
if DeepBlueDocs::Application.config.do_ordered_list_hack_save
-
88
rv = OrderedStringHelper.serialize( values )
-
elsif !ordered_values.nil?
-
rv = OrderedStringHelper.serialize( values )
-
end
-
end
-
88
return rv
-
end
-
-
1
def self.report_collection( collection, dir: nil, out: nil, depth: '==' )
-
target_file = nil
-
if out.nil?
-
target_file = metadata_filename_collection( dir, collection )
-
open( target_file, 'w' ) do |out2|
-
report_collection( collection, out: out2, depth: depth )
-
end
-
else
-
title = report_title( collection, field_sep: '' )
-
out.puts "#{depth} Collection: #{title} #{depth}"
-
report_item( out, "ID: ", collection.id )
-
report_item( out, "Title: ", collection.title, one_line: true )
-
report_item( out, "Total items: ", collection.member_objects.count )
-
report_item( out, "Total size: ", human_readable_size( collection.bytes ) )
-
report_item( out, "Creator: ", collection.creator, one_line: false, item_prefix: "\t" )
-
report_item( out, "Keyword: ", collection.keyword, one_line: false, item_prefix: "\t" )
-
report_item( out, "Discipline: ", collection.subject_discipline, one_line: false, item_prefix: "\t" )
-
report_item( out, "Language: ", collection.language )
-
report_item( out, "Citation to related material: ", collection.referenced_by )
-
report_item( out, "Visibility: ", collection.visibility )
-
if collection.member_objects.count.positive?
-
collection.member_objects.each do |work|
-
out.puts
-
report_work(work, out: out, depth: "=#{depth}" )
-
end
-
end
-
end
-
return target_file
-
end
-
-
1
def self.report_collection_work( collection, work, dir: nil, out: nil, depth: '==' )
-
target_file = nil
-
if out.nil?
-
target_file = metadata_filename_collection_work( dir, collection, work )
-
open( target_file, 'w' ) do |out2|
-
report_collection_work( collection, work, out: out2, depth: depth )
-
end
-
else
-
report_work( work, out: out, depth: depth )
-
end
-
return target_file
-
end
-
-
1
def self.report_file_set( file_set, out: nil, depth: '==' )
-
out.puts "#{depth} File Set: #{file_set.label} #{depth}"
-
report_item( out, "ID: ", file_set.id )
-
report_item( out, "File name: ", file_set.label )
-
report_item( out, "Date uploaded: ", file_set.date_uploaded )
-
report_item( out, "Date modified: ", file_set.date_modified )
-
report_item( out, "Total file size: ", human_readable_size( file_set.file_size[0] ) )
-
report_item( out, "Checksum: ", file_set.original_checksum )
-
report_item( out, "Mimetype: ", file_set.mime_type )
-
end
-
-
1
def self.report_work( work, dir: nil, out: nil, depth: '==' )
-
target_file = nil
-
if out.nil?
-
target_file = metadata_filename_work( dir, work )
-
open( target_file, 'w' ) do |out2|
-
report_work(work, out: out2, depth: depth )
-
end
-
else
-
title = report_title( work, field_sep: '' )
-
out.puts "#{depth} Generic Work: #{title} #{depth}"
-
report_item( out, "ID: ", work.id )
-
report_item( out, "Title: ", work.title, one_line: true )
-
report_item( out, "Prior Identifier: ", work.prior_identifier, one_line: true )
-
report_item( out, "Methodology: ", work.methodology )
-
report_item( out, "Description: ", work.description, one_line: false, item_prefix: "\t" )
-
report_item( out, "Creator: ", work.creator, one_line: false, item_prefix: "\t" )
-
report_item( out, "Depositor: ", work.depositor )
-
report_item( out, "Contact: ", work.authoremail )
-
report_item( out, "Discipline: ", work.subject_discipline, one_line: false, item_prefix: "\t" )
-
report_item( out, "Funded by: ", work.fundedby )
-
report_item( out, "Funded by Other: ", work.fundedby_other ) if report_source == SOURCE_DBDv2
-
report_item( out, "ORSP Grant Number: ", work.grantnumber )
-
report_item( out, "Keyword: ", work.keyword, one_line: false, item_prefix: "\t" )
-
report_item( out, "Date coverage: ", work.date_coverage )
-
report_item( out, "Citation to related material: ", work.referenced_by )
-
report_item( out, "Language: ", work.language )
-
report_item( out, "Total file count: ", work.file_set_ids.count )
-
report_item( out, "Total file size: ", human_readable_size( work.total_file_size ) )
-
report_item( out, "DOI: ", work.doi, optional: true )
-
report_item( out, "Visibility: ", work.visibility )
-
report_item( out, "Rights: ", work.rights_license )
-
report_item( out, "Rights (other): ", work.rights_license_other ) if report_source == SOURCE_DBDv2
-
report_item( out, "Admin set id: ", work.admin_set_id )
-
report_item( out, "Tombstone: ", work.tombstone, optional: true )
-
if work.file_sets.count.positive?
-
work.file_sets.each do |file_set|
-
out.puts
-
report_file_set( file_set, out: out, depth: "=#{depth}" )
-
end
-
end
-
end
-
return target_file
-
end
-
-
1
def self.report_item( out,
-
label,
-
value,
-
item_prefix: '',
-
item_postfix: '',
-
item_seperator: FIELD_SEP,
-
one_line: nil,
-
optional: false )
-
multi_item = value.respond_to?( :count ) && value.respond_to?( :each )
-
if optional
-
return if value.nil?
-
return if value.to_s.empty?
-
return if multi_item && value.count.zero?
-
end
-
if one_line.nil?
-
one_line = true
-
if multi_item
-
one_line = false if 1 < value.count
-
end
-
end
-
if one_line
-
if value.respond_to?( :join )
-
out.puts( "#{label}#{item_prefix}#{value.join( "#{item_prefix}#{item_seperator}#{item_postfix}" )}#{item_postfix}" )
-
elsif multi_item
-
out.print( label.to_s )
-
count = 0
-
value.each do |item|
-
count += 1
-
out.print( "#{item_prefix}#{item}#{item_postfix}" )
-
out.print( item_seperator.to_s ) unless value.count == count
-
end
-
out.puts
-
else
-
out.puts( "#{label}#{item_prefix}#{value}#{item_postfix}" )
-
end
-
else
-
out.puts( label.to_s )
-
if multi_item
-
value.each { |item| out.puts( "#{item_prefix}#{item}#{item_postfix}" ) }
-
else
-
out.puts( "#{item_prefix}#{value}#{item_postfix}" )
-
end
-
end
-
end
-
-
1
def self.report_source
-
SOURCE_DBDv2
-
end
-
-
1
def self.report_title( curation_concern, field_sep: FIELD_SEP )
-
curation_concern.title.join( field_sep )
-
end
-
-
1
def self.yaml_body_collections( out, indent:, curation_concern:, source: )
-
yaml_item( out, indent, ":id:", curation_concern.id )
-
if source == SOURCE_DBDv2
-
yaml_item( out, indent, ":collection_type:", curation_concern.collection_type.machine_id, escape: true )
-
# yaml_item( out, indent, ":collection_type_gid:", curation_concern.collection_type_gid, escape: true )
-
end
-
# yaml_item( out, indent, ":creator:", curation_concern.creator, escape: true )
-
# yaml_item( out, indent, ":date_created:", curation_concern.date_created )
-
# yaml_item( out, indent, ":date_modified:", curation_concern.date_modified )
-
# yaml_item( out, indent, ":description:", curation_concern.description, escape: true )
-
# yaml_item( out, indent, ":depositor:", curation_concern.depositor )
-
# yaml_item( out, indent, ":doi:", curation_concern.doi, escape: true )
-
yaml_item( out, indent, ":edit_users:", curation_concern.edit_users, escape: true )
-
# yaml_item( out, indent, ':keyword:', curation_concern.keyword, escape: true )
-
# yaml_item( out, indent, ":language:", curation_concern.language, escape: true )
-
yaml_item_prior_identifier( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item_referenced_by( out, indent, curation_concern: curation_concern, source: source )
-
yaml_item_subject( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item( out, indent, ':title:', curation_concern.title, escape: true )
-
# yaml_item( out, indent, ":tombstone:", curation_concern.tombstone, single_value: true )
-
yaml_item( out, indent, ":total_work_count:", curation_concern.work_ids.count )
-
yaml_item( out, indent, ":total_file_size:", curation_concern.total_file_size )
-
yaml_item( out,
-
indent,
-
":total_file_size_human_readable:",
-
human_readable_size( curation_concern.total_file_size ),
-
escape: true )
-
yaml_item( out, indent, ":visibility:", curation_concern.visibility )
-
skip = %w[ prior_identifier rights rights_license subject subject_discipline total_file_size ]
-
attribute_names_collection.each do |name|
-
next if skip.include? name
-
yaml_item_collection( out, indent, curation_concern, name: name )
-
end
-
end
-
-
# def self.yaml_body_collections2( out, indent:, curation_concern:, source: )
-
# yaml_item( out, indent, ":id:", curation_concern.id )
-
# if source == SOURCE_DBDv2
-
# yaml_item( out, indent, ":collection_type:", curation_concern.collection_type.machine_id, escape: true )
-
# yaml_item( out, indent, ":collection_type_gid:", curation_concern.collection_type_gid, escape: true )
-
# end
-
# yaml_item( out, indent, ":creator:", curation_concern.creator, escape: true )
-
# yaml_item( out, indent, ":date_created:", curation_concern.date_created )
-
# yaml_item( out, indent, ":date_modified:", curation_concern.date_modified )
-
# yaml_item( out, indent, ":description:", curation_concern.description, escape: true )
-
# yaml_item( out, indent, ":depositor:", curation_concern.depositor )
-
# yaml_item( out, indent, ":doi:", curation_concern.doi, escape: true )
-
# yaml_item( out, indent, ":edit_users:", curation_concern.edit_users, escape: true )
-
# yaml_item( out, indent, ':keyword:', curation_concern.keyword, escape: true )
-
# yaml_item( out, indent, ":language:", curation_concern.language, escape: true )
-
# yaml_item_prior_identifier( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item_referenced_by( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item_subject( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item( out, indent, ':title:', curation_concern.title, escape: true )
-
# yaml_item( out, indent, ":tombstone:", curation_concern.tombstone, single_value: true )
-
# yaml_item( out, indent, ":total_work_count:", curation_concern.work_ids.count )
-
# yaml_item( out, indent, ":total_file_size:", curation_concern.total_file_size )
-
# yaml_item( out,
-
# indent,
-
# ":total_file_size_human_readable:",
-
# human_readable_size( curation_concern.total_file_size ),
-
# escape: true )
-
# yaml_item( out, indent, ":visibility:", curation_concern.visibility )
-
# end
-
-
1
def self.yaml_body_files( out,
-
indent_base:,
-
indent:,
-
curation_concern:,
-
mode: MODE_BUILD,
-
source:,
-
target_dirname: )
-
-
indent_first_line = indent
-
yaml_line( out, indent_first_line, ':file_set_ids:' )
-
return unless curation_concern.file_sets.count.positive?
-
indent = indent_base + indent_first_line + "-"
-
curation_concern.file_sets.each do |file_set|
-
yaml_item( out, indent, '', file_set.id, escape: true )
-
end
-
curation_concern.file_sets.each do |file_set|
-
log_provenance_migrate( curation_concern: file_set, parent: curation_concern, source: source ) if MODE_MIGRATE == mode
-
file_id = ":#{yaml_file_set_id( file_set )}:"
-
yaml_line( out, indent_first_line, file_id )
-
indent = indent_base + indent_first_line
-
yaml_item( out, indent, ':id:', file_set.id, escape: true )
-
single_value = 1 == file_set.title.size
-
yaml_item( out, indent, ':title:', file_set.title, escape: true, single_value: single_value )
-
yaml_item_prior_identifier( out, indent, curation_concern: file_set, source: source )
-
file_path = yaml_export_file_path( target_dirname: target_dirname, file_set: file_set )
-
yaml_item( out, indent, ':file_path:', file_path.to_s, escape: true )
-
checksum = yaml_file_set_checksum( file_set: file_set )
-
yaml_item( out, indent, ":checksum_algorithm:", checksum.present? ? checksum.algorithm : '', escape: true )
-
yaml_item( out, indent, ":checksum_value:", checksum.present? ? checksum.value : '', escape: true )
-
yaml_item( out, indent, ":edit_users:", file_set.edit_users, escape: true )
-
file_size = if file_set.file_size.blank?
-
file_set.original_file.nil? ? 0 : file_set.original_file.size
-
else
-
file_set.file_size[0]
-
end
-
yaml_item( out, indent, ":file_size:", file_size )
-
yaml_item( out, indent, ":file_size_human_readable:", human_readable_size( file_size ), escape: true )
-
yaml_item( out, indent, ":mime_type:", file_set.mime_type, escape: true )
-
value = file_set.original_checksum.blank? ? '' : file_set.original_checksum[0]
-
yaml_item( out, indent, ":original_checksum:", value )
-
value = file_set.original_file.nil? ? nil : file_set.original_file.original_name
-
yaml_item( out, indent, ":original_name:", value, escape: true )
-
yaml_item( out, indent, ":visibility:", file_set.visibility )
-
skip = %w[ title file_size ]
-
attribute_names_file_set.each do |name|
-
next if skip.include? name
-
yaml_item_file_set( out, indent, file_set, name: name )
-
end
-
end
-
end
-
-
# def self.yaml_body_files2( out,
-
# indent_base:,
-
# indent:,
-
# curation_concern:,
-
# mode: MODE_BUILD,
-
# source:,
-
# target_dirname: )
-
#
-
# indent_first_line = indent
-
# yaml_line( out, indent_first_line, ':file_set_ids:' )
-
# return unless curation_concern.file_sets.count.positive?
-
# indent = indent_base + indent_first_line + "-"
-
# curation_concern.file_sets.each do |file_set|
-
# yaml_item( out, indent, '', file_set.id, escape: true )
-
# end
-
# curation_concern.file_sets.each do |file_set|
-
# log_provenance_migrate( curation_concern: file_set, parent: curation_concern, source: source ) if MODE_MIGRATE == mode
-
# file_id = ":#{yaml_file_set_id( file_set )}:"
-
# yaml_line( out, indent_first_line, file_id )
-
# indent = indent_base + indent_first_line
-
# yaml_item( out, indent, ':id:', file_set.id, escape: true )
-
# single_value = 1 == file_set.title.size
-
# yaml_item( out, indent, ':title:', file_set.title, escape: true, single_value: single_value )
-
# yaml_item_prior_identifier( out, indent, curation_concern: file_set, source: source )
-
# file_path = yaml_export_file_path( target_dirname: target_dirname, file_set: file_set )
-
# yaml_item( out, indent, ':file_path:', file_path.to_s, escape: true )
-
# checksum = yaml_file_set_checksum( file_set: file_set )
-
# yaml_item( out, indent, ":checksum_algorithm:", checksum.present? ? checksum.algorithm : '', escape: true )
-
# yaml_item( out, indent, ":checksum_value:", checksum.present? ? checksum.value : '', escape: true )
-
# yaml_item( out, indent, ":date_created:", file_set.date_created )
-
# yaml_item( out, indent, ":date_created:", file_set.date_created )
-
# yaml_item( out, indent, ":date_modified:", file_set.date_modified )
-
# yaml_item( out, indent, ":date_uploaded:", file_set.date_uploaded )
-
# yaml_item( out, indent, ":edit_users:", file_set.edit_users, escape: true )
-
# file_size = if file_set.file_size.blank?
-
# file_set.original_file.nil? ? 0 : file_set.original_file.size
-
# else
-
# file_set.file_size[0]
-
# end
-
# yaml_item( out, indent, ":file_size:", file_size )
-
# yaml_item( out, indent, ":file_size_human_readable:", human_readable_size( file_size ), escape: true )
-
# yaml_item( out, indent, ":label:", file_set.label, escape: true )
-
# yaml_item( out, indent, ":mime_type:", file_set.mime_type, escape: true )
-
# value = file_set.original_checksum.blank? ? '' : file_set.original_checksum[0]
-
# yaml_item( out, indent, ":original_checksum:", value )
-
# value = file_set.original_file.nil? ? nil : file_set.original_file.original_name
-
# yaml_item( out, indent, ":original_name:", value, escape: true )
-
# yaml_item( out, indent, ":visibility:", file_set.visibility )
-
# end
-
# end
-
-
1
def self.yaml_body_user_body( out, indent_base:, indent:, user: )
-
indent_first_line = indent
-
user_email = ":#{yaml_user_email( user )}:"
-
yaml_line( out, indent_first_line, user_email )
-
indent = indent_base + indent_first_line
-
yaml_item(out, indent, ':email:', user.email, escape: true )
-
skip = %w[ email ]
-
attribute_names_user.each do |name|
-
next if skip.include? name
-
yaml_item_user(out, indent, user, name: name )
-
end
-
end
-
-
1
def self.yaml_body_users( out, indent_base:, indent:, users: )
-
yaml_item( out, indent, ":total_user_count:", users.count )
-
indent_first_line = indent
-
yaml_line( out, indent_first_line, ':user_emails:' )
-
return unless users.count.positive?
-
indent = indent_base + indent_first_line + "-"
-
users.each do |user|
-
yaml_item( out, indent, '', user.email, escape: true )
-
end
-
end
-
-
1
def self.yaml_body_works( out, indent:, curation_concern:, source: )
-
yaml_item( out, indent, ":id:", curation_concern.id )
-
yaml_item( out, indent, ":admin_set_id:", curation_concern.admin_set_id, escape: true )
-
yaml_item( out, indent, ":edit_users:", curation_concern.edit_users, escape: true )
-
yaml_item_prior_identifier( out, indent, curation_concern: curation_concern, source: source )
-
yaml_item_rights( out, indent, curation_concern: curation_concern, source: source )
-
yaml_item_subject( out, indent, curation_concern: curation_concern, source: source )
-
yaml_item( out, indent, ":total_file_count:", curation_concern.file_set_ids.count )
-
yaml_item( out, indent, ":total_file_size:", curation_concern.total_file_size )
-
yaml_item( out,
-
indent,
-
":total_file_size_human_readable:",
-
human_readable_size( curation_concern.total_file_size ),
-
escape: true )
-
yaml_item( out, indent, ":visibility:", curation_concern.visibility )
-
skip = %w[ prior_identifier rights rights_license subject subject_discipline total_file_size ]
-
attribute_names_work( source: source ).each do |name|
-
next if skip.include? name
-
yaml_item_work( out, indent, curation_concern, name: name )
-
end
-
end
-
-
# def self.yaml_body_works2( out, indent:, curation_concern:, source: )
-
# yaml_item( out, indent, ":id:", curation_concern.id )
-
# yaml_item( out, indent, ":admin_set_id:", curation_concern.admin_set_id, escape: true )
-
# yaml_item( out, indent, ":authoremail:", curation_concern.authoremail )
-
# yaml_item( out, indent, ":creator:", curation_concern.creator, escape: true )
-
# yaml_item( out, indent, ":curation_notes_admin:", curation_concern.curation_notes_admin, escape: true ) if source == SOURCE_DBDv2
-
# yaml_item( out, indent, ":curation_notes_user:", curation_concern.curation_notes_user, escape: true ) if source == SOURCE_DBDv2
-
# yaml_item( out, indent, ":date_coverage:", curation_concern.date_coverage, single_value: true )
-
# yaml_item( out, indent, ":date_created:", curation_concern.date_created )
-
# yaml_item( out, indent, ":date_modified:", curation_concern.date_modified )
-
# yaml_item( out, indent, ":date_uploaded:", curation_concern.date_uploaded )
-
# yaml_item( out, indent, ":depositor:", curation_concern.depositor )
-
# yaml_item( out, indent, ":description:", curation_concern.description, escape: true )
-
# yaml_item( out, indent, ":doi:", curation_concern.doi, escape: true )
-
# yaml_item( out, indent, ":edit_users:", curation_concern.edit_users, escape: true )
-
# yaml_item( out, indent, ":fundedby:", curation_concern.fundedby, single_value: true, escape: true )
-
# yaml_item( out, indent, ":fundedby_other:", curation_concern.fundedby_other, single_value: true, escape: true ) if source == SOURCE_DBDv2
-
# yaml_item( out, indent, ":grantnumber:", curation_concern.grantnumber, escape: true )
-
# yaml_item_referenced_by( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item( out, indent, ':keyword:', curation_concern.keyword, escape: true )
-
# yaml_item( out, indent, ":language:", curation_concern.language, escape: true )
-
# yaml_item( out, indent, ":methodology:", curation_concern.methodology, escape: true )
-
# yaml_item_prior_identifier( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item_rights( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item( out, indent, ":rights_license_other: ", curation_concern.rights_license_other, escape: true ) if source == SOURCE_DBDv2
-
# yaml_item_subject( out, indent, curation_concern: curation_concern, source: source )
-
# yaml_item( out, indent, ':title:', curation_concern.title, escape: true )
-
# yaml_item( out, indent, ":tombstone:", curation_concern.tombstone, single_value: true )
-
# yaml_item( out, indent, ":total_file_count:", curation_concern.file_set_ids.count )
-
# yaml_item( out, indent, ":total_file_size:", curation_concern.total_file_size )
-
# yaml_item( out,
-
# indent,
-
# ":total_file_size_human_readable:",
-
# human_readable_size( curation_concern.total_file_size ),
-
# escape: true )
-
# yaml_item( out, indent, ":visibility:", curation_concern.visibility )
-
# end
-
-
1
def self.yaml_escape_value( value, comment: false, escape: false )
-
return "" if value.nil?
-
return value unless escape
-
return value if comment
-
value = value.to_json
-
return "" if "\"\"" == value
-
return value
-
end
-
-
1
def self.yaml_export_file_path( target_dirname:, file_set: )
-
file = file_from_file_set( file_set )
-
export_file_name = file.original_name
-
target_dirname.join "#{file_set.id}_#{export_file_name}"
-
end
-
-
1
def self.yaml_file_set_checksum( file_set: )
-
file = file_from_file_set( file_set )
-
return file.checksum if file.present?
-
return nil
-
end
-
-
1
def self.yaml_file_set_id( file_set )
-
"f_#{file_set.id}"
-
end
-
-
1
def self.yaml_filename( pathname_dir:, id:, prefix:, task: )
-
pathname_dir = Pathname.new pathname_dir unless pathname_dir.is_a? Pathname
-
pathname_dir.join "#{prefix}#{id}_#{task}.yml"
-
end
-
-
1
def self.yaml_filename_collection( pathname_dir:, collection:, task: DEFAULT_TASK )
-
yaml_filename( pathname_dir: pathname_dir, id: collection.id, prefix: PREFIX_COLLECTION, task: task )
-
end
-
-
1
def self.yaml_filename_users( pathname_dir:, task: DEFAULT_TASK )
-
yaml_filename( pathname_dir: pathname_dir, id: '', prefix: PREFIX_USERS, task: task )
-
end
-
-
1
def self.yaml_filename_work( pathname_dir:, work:, task: DEFAULT_TASK )
-
yaml_filename( pathname_dir: pathname_dir, id: work.id, prefix: PREFIX_WORK, task: task )
-
end
-
-
1
def self.yaml_header( out, indent:, curation_concern:, header_type:, source:, mode: )
-
yaml_line( out, indent, ':email:', curation_concern.depositor )
-
yaml_line( out, indent, ':visibility:', curation_concern.visibility )
-
yaml_line( out, indent, ':ingester:', '' )
-
yaml_line( out, indent, ':source:', source )
-
yaml_line( out, indent, ':export_timestamp:', DateTime.now.to_s )
-
yaml_line( out, indent, ':mode:', mode )
-
yaml_line( out, indent, ':id:', curation_concern.id )
-
yaml_line( out, indent, header_type )
-
end
-
-
1
def self.yaml_header_populate( out, indent:, rake_task: 'umrdr:populate', target_filename: )
-
yaml_line( out, indent, target_filename.to_s, comment: true )
-
yaml_line( out, indent, "bundle exec rake #{rake_task}[#{target_filename}]", comment: true )
-
yaml_line( out, indent, "---" )
-
yaml_line( out, indent, ':user:' )
-
end
-
-
1
def self.yaml_header_users( out, indent:, header_type: HEADER_TYPE_USERS, source:, mode: )
-
yaml_line( out, indent, ':ingester:', '' )
-
yaml_line( out, indent, ':source:', source )
-
yaml_line( out, indent, ':export_timestamp:', DateTime.now.to_s )
-
yaml_line( out, indent, ':mode:', mode )
-
yaml_line( out, indent, header_type )
-
end
-
-
1
def self.yaml_is_a_work?( curation_concern:, source: )
-
if source == SOURCE_DBDv2
-
curation_concern.is_a? DataSet
-
else
-
curation_concern.is_a? GenericWork
-
end
-
end
-
-
1
def self.yaml_item( out,
-
indent,
-
label,
-
value = '',
-
single_value: false,
-
comment: false,
-
indent_base: " ",
-
label_postfix: ' ',
-
escape: false )
-
-
indent = "# #{indent}" if comment
-
if single_value && value.present? && value.respond_to?( :each )
-
value = value[0]
-
out.puts "#{indent}#{label}#{label_postfix}#{yaml_escape_value( value, comment: comment, escape: escape )}"
-
elsif value.respond_to?(:each)
-
out.puts "#{indent}#{label}#{label_postfix}"
-
indent += indent_base
-
value.each { |item| out.puts "#{indent}- #{yaml_escape_value( item, comment: comment, escape: escape )}" }
-
else
-
out.puts "#{indent}#{label}#{label_postfix}#{yaml_escape_value( value, comment: comment, escape: escape )}"
-
end
-
end
-
-
1
def self.yaml_item_collection( out, indent, curation_concern, name: )
-
return if ATTRIBUTE_NAMES_IGNORE.include? name
-
label = ":#{name}:"
-
value = curation_concern[name]
-
return if value.blank? && !ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC.include?( name )
-
yaml_item( out, indent, label, value, escape: true )
-
end
-
-
1
def self.yaml_item_file_set( out, indent, file_set, name: )
-
return if ATTRIBUTE_NAMES_IGNORE.include? name
-
label = ":#{name}:"
-
value = file_set[name]
-
return if value.blank? && !ATTRIBUTE_NAMES_ALWAYS_INCLUDE_FILE_SET.include?( name )
-
yaml_item( out, indent, label, value, escape: true )
-
end
-
-
1
def self.yaml_item_prior_identifier( out, indent, curation_concern:, source: )
-
if source == SOURCE_DBDv1
-
yaml_item( out, indent, ":prior_identifier:", '' )
-
else
-
# ids = curation_concern.prior_identifier
-
# ids = [] if ids.nil?
-
# ids << curation_concern.id
-
# yaml_item( out, indent, ':prior_identifier:', ActiveSupport::JSON.encode( ids ) )
-
yaml_item( out, indent, ":prior_identifier:", curation_concern.prior_identifier )
-
end
-
end
-
-
1
def self.yaml_item_referenced_by( out, indent, curation_concern:, source: )
-
if source == SOURCE_DBDv1
-
yaml_item( out, indent, ":isReferencedBy:", curation_concern.isReferencedBy, escape: true )
-
else
-
yaml_item( out, indent, ":referenced_by:", curation_concern.referenced_by, escape: true )
-
end
-
end
-
-
1
def self.yaml_item_rights( out, indent, curation_concern:, source: )
-
if source == SOURCE_DBDv1
-
yaml_item( out, indent, ":rights:", curation_concern.rights, escape: true )
-
else
-
yaml_item( out, indent, ":rights_license:", curation_concern.rights_license, escape: true )
-
end
-
end
-
-
1
def self.yaml_item_subject( out, indent, curation_concern:, source: )
-
if source == SOURCE_DBDv1
-
yaml_item( out, indent, ":subject:", curation_concern.subject, escape: true )
-
else
-
yaml_item( out, indent, ":subject_discipline:", curation_concern.subject_discipline, escape: true )
-
end
-
end
-
-
1
def self.yaml_item_user( out, indent, user, name: )
-
return if ATTRIBUTE_NAMES_USER_IGNORE.include? name
-
label = ":#{name}:"
-
value = user[name]
-
return if value.blank? && !ATTRIBUTE_NAMES_ALWAYS_INCLUDE_USER.include?( name )
-
yaml_item( out, indent, label, value, escape: true )
-
end
-
-
1
def self.yaml_item_work( out, indent, curation_concern, name: )
-
return if ATTRIBUTE_NAMES_IGNORE.include? name
-
label = ":#{name}:"
-
value = curation_concern[name]
-
return if value.blank? && !ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC.include?( name )
-
yaml_item( out, indent, label, value, escape: true )
-
end
-
-
1
def self.yaml_line( out, indent, label, value = '', comment: false, label_postfix: ' ', escape: false )
-
indent = "# #{indent}" if comment
-
out.puts "#{indent}#{label}#{label_postfix}#{yaml_escape_value( value, comment: comment, escape: escape )}"
-
end
-
-
1
    # Export a collection as a "populate" YAML file, optionally including
    # its member works and exporting the works' files to disk.
    #
    # Two-phase entry point:
    # - out is nil (normal external call): resolves the collection id,
    #   computes target file/dir under +dir+, opens the target file and
    #   recurses with +out+ bound to the open stream; afterwards exports
    #   member works' files when export_files is true.
    # - out is set (recursive call): writes the YAML header, collection
    #   body and, when populate_works is true, the member work ids plus a
    #   :works_<id>: section per member work.
    #
    # @param collection [Collection, String] collection object or its id
    # @param dir [String, Pathname] base export directory
    # @param out [IO, nil] open output stream, or nil for the file-creating pass
    # @param populate_works [Boolean] include member works in the YAML
    # @param export_files [Boolean] copy member works' files to the target dir
    # @param overwrite_export_files [Boolean] overwrite previously exported files
    # @param source [String] schema variant (SOURCE_DBDv1 / SOURCE_DBDv2)
    # @param mode [String] e.g. MODE_BUILD or MODE_MIGRATE
    # @param target_filename [Pathname, nil] set on the recursive call
    # @param target_dirname [Pathname, nil] set on the recursive call
    def self.yaml_populate_collection( collection:,
                                       dir: DEFAULT_BASE_DIR,
                                       out: nil,
                                       populate_works: true,
                                       export_files: true,
                                       overwrite_export_files: true,
                                       source: DEFAULT_SOURCE,
                                       mode: MODE_BUILD,
                                       target_filename: nil,
                                       target_dirname: nil )

      target_file = nil
      dir = Pathname.new dir unless dir.is_a? Pathname
      if out.nil?
        collection = Collection.find collection if collection.is_a? String
        target_file = yaml_filename_collection( pathname_dir: dir, collection: collection )
        target_dir = yaml_targetdir_collection( pathname_dir: dir, collection: collection )
        Dir.mkdir( target_dir ) unless Dir.exist? target_dir
        open( target_file, 'w' ) do |out2|
          # Recurse with export_files: false — files are exported below,
          # after the YAML file has been fully written.
          yaml_populate_collection( collection: collection,
                                    out: out2,
                                    populate_works: populate_works,
                                    export_files: false,
                                    overwrite_export_files: overwrite_export_files,
                                    source: source,
                                    mode: mode,
                                    target_filename: target_file,
                                    target_dirname: target_dir )
        end
        if export_files
          collection.member_objects.each do |work|
            next unless yaml_is_a_work?( curation_concern: work, source: source )
            yaml_work_export_files( work: work, target_dirname: target_dir, overwrite: overwrite_export_files )
          end
        end
      else
        log_provenance_migrate( curation_concern: collection, source: source ) if MODE_MIGRATE == mode
        indent_base = " " * 2
        indent = indent_base * 0
        yaml_header_populate( out, indent: indent, target_filename: target_filename )
        indent = indent_base * 1
        yaml_header( out,
                     indent: indent,
                     curation_concern: collection,
                     header_type: HEADER_TYPE_COLLECTIONS,
                     source: source,
                     mode: mode )
        indent = indent_base * 2
        yaml_body_collections( out, indent: indent, curation_concern: collection, source: source )
        return unless populate_works
        return unless collection.member_objects.size.positive?
        # First pass: list member work ids under the works header.
        indent = indent_base * 2
        yaml_line( out, indent, HEADER_TYPE_WORKS )
        indent = indent_base + indent + "-"
        collection.member_objects.each do |work|
          next unless yaml_is_a_work?( curation_concern: work, source: source )
          yaml_item( out, indent, '', work.id, escape: true )
        end
        # Second pass: a :works_<id>: section (body + files) per work.
        indent = indent_base * 2
        collection.member_objects.each do |work|
          next unless yaml_is_a_work?( curation_concern: work, source: source )
          indent = indent_base * 2
          yaml_line( out, indent, ":works_#{work.id}:" )
          indent = indent_base * 3
          log_provenance_migrate( curation_concern: work, parent: collection, source: source ) if MODE_MIGRATE == mode
          yaml_body_works( out, indent: indent, curation_concern: work, source: source )
          yaml_body_files( out,
                           indent_base: indent_base,
                           indent: indent,
                           curation_concern: work,
                           mode: mode,
                           source: source,
                           target_dirname: target_dirname )
        end
      end
    end
-
-
1
    # Export all users as a populate YAML file.
    #
    # Two-phase entry point, like yaml_populate_collection: when +out+ is
    # nil the target file under +dir+ is created and the method recurses
    # with +out+ bound to the open stream; when +out+ is set the users
    # header and per-user bodies are written to it.
    #
    # @param dir [String, Pathname] base export directory
    # @param out [IO, nil] open output stream, or nil for the file-creating pass
    # @param source [String] schema variant
    # @param mode [String] defaults to MODE_MIGRATE (unlike the work/collection exports)
    # @param target_filename [Pathname, nil] set on the recursive call
    # @return [Pathname, nil] the created file path, or nil when +out+ was supplied
    def self.yaml_populate_users( dir: DEFAULT_BASE_DIR,
                                  out: nil,
                                  source: DEFAULT_SOURCE,
                                  mode: MODE_MIGRATE,
                                  target_filename: nil )

      target_file = nil
      dir = Pathname.new dir unless dir.is_a? Pathname
      Dir.mkdir( dir ) unless Dir.exist? dir
      if out.nil?
        target_file = yaml_filename_users( pathname_dir: dir, task: mode )
        # Users have no exported files, so no target dir is created:
        # target_dir = yaml_targetdir_users( pathname_dir: dir, task: mode )
        # Dir.mkdir( target_dir ) unless Dir.exist? target_dir
        open( target_file, 'w' ) do |out2|
          yaml_populate_users( out: out2, source: source, mode: mode, target_filename: target_file )
        end
      else
        # log_provenance_migrate( curation_concern: curation_concern, source: source ) if MODE_MIGRATE == mode
        indent_base = " " * 2
        indent = indent_base * 0
        yaml_header_populate( out, indent: indent, rake_task: 'umrdr:populate_users', target_filename: target_filename )
        indent = indent_base * 1
        yaml_header_users( out, indent: indent, source: source, mode: mode )
        indent = indent_base * 2
        users = User.all
        # Summary list of users, then one body section per user.
        yaml_body_users( out, indent_base: indent_base, indent: indent, users: users )
        users.each do |user|
          yaml_body_user_body( out, indent_base: indent_base, indent: indent, user: user )
        end
      end
      return target_file
    end
-
-
1
    # Export a single work as a populate YAML file, optionally exporting
    # its files to disk.
    #
    # Two-phase entry point: when +out+ is nil the work id is resolved,
    # the target file/dir are computed under +dir+, and the method
    # recurses with +out+ bound to the open stream (exporting files
    # afterwards when export_files is true); when +out+ is set the YAML
    # header, work body and file sections are written to it.
    #
    # @param curation_concern [ActiveFedora::Base, String] work or its id
    # @param dir [String, Pathname] base export directory
    # @param out [IO, nil] open output stream, or nil for the file-creating pass
    # @param export_files [Boolean] copy the work's files to the target dir
    # @param overwrite_export_files [Boolean] overwrite previously exported files
    # @param source [String] schema variant (SOURCE_DBDv1 / SOURCE_DBDv2)
    # @param mode [String] e.g. MODE_BUILD or MODE_MIGRATE
    # @param target_filename [Pathname, nil] set on the recursive call
    # @param target_dirname [Pathname, nil] set on the recursive call
    # @return [Pathname, nil] the created file path, or nil when +out+ was supplied
    def self.yaml_populate_work( curation_concern:,
                                 dir: DEFAULT_BASE_DIR,
                                 out: nil,
                                 export_files: true,
                                 overwrite_export_files: true,
                                 source: DEFAULT_SOURCE,
                                 mode: MODE_BUILD,
                                 target_filename: nil,
                                 target_dirname: nil )

      target_file = nil
      dir = Pathname.new dir unless dir.is_a? Pathname
      if out.nil?
        curation_concern = yaml_work_find( curation_concern: curation_concern, source: source ) if curation_concern.is_a? String
        target_file = yaml_filename_work( pathname_dir: dir, work: curation_concern )
        target_dir = yaml_targetdir_work( pathname_dir: dir, work: curation_concern )
        Dir.mkdir( target_dir ) unless Dir.exist? target_dir
        open( target_file, 'w' ) do |out2|
          yaml_populate_work( curation_concern: curation_concern,
                              out: out2,
                              export_files: export_files,
                              overwrite_export_files: overwrite_export_files,
                              source: source,
                              mode: mode,
                              target_filename: target_file,
                              target_dirname: target_dir )
        end
        if export_files
          yaml_work_export_files( work: curation_concern, target_dirname: target_dir, overwrite: overwrite_export_files )
        end
      else
        log_provenance_migrate( curation_concern: curation_concern, source: source ) if MODE_MIGRATE == mode
        indent_base = " " * 2
        indent = indent_base * 0
        yaml_header_populate( out, indent: indent, target_filename: target_filename )
        indent = indent_base * 1
        yaml_header( out,
                     indent: indent,
                     curation_concern: curation_concern,
                     header_type: HEADER_TYPE_WORKS,
                     source: source,
                     mode: mode )
        indent = indent_base * 2
        yaml_body_works( out, indent: indent, curation_concern: curation_concern, source: source )
        yaml_body_files( out,
                         indent_base: indent_base,
                         indent: indent,
                         curation_concern: curation_concern,
                         mode: mode,
                         source: source,
                         target_dirname: target_dirname )
      end
      return target_file
    end
-
-
1
def self.yaml_targetdir( pathname_dir:, id:, prefix:, task: )
-
pathname_dir = Pathname.new pathname_dir unless pathname_dir.is_a? Pathname
-
pathname_dir.join "#{prefix}#{id}_#{task}"
-
end
-
-
1
def self.yaml_targetdir_collection( pathname_dir:, collection:, task: DEFAULT_TASK )
-
yaml_targetdir( pathname_dir: pathname_dir, id: collection.id, prefix: PREFIX_COLLECTION, task: task )
-
end
-
-
1
def self.yaml_targetdir_users( pathname_dir:, task: DEFAULT_TASK )
-
yaml_targetdir( pathname_dir: pathname_dir, id: '', prefix: PREFIX_USERS, task: task )
-
end
-
-
1
def self.yaml_targetdir_work( pathname_dir:, work:, task: DEFAULT_TASK )
-
yaml_targetdir( pathname_dir: pathname_dir, id: work.id, prefix: PREFIX_WORK, task: task )
-
end
-
-
1
def self.yaml_user_email( user )
-
"user_#{user.email}"
-
end
-
-
1
def self.yaml_work_export_files( work:, target_dirname: nil, log_filename: nil, overwrite: true )
-
log_file = target_dirname.join ".export.log" if log_filename.nil?
-
open( log_file, 'w' ) { |f| f.write('') } # erase log file
-
start_time = Time.now
-
log_lines( log_file,
-
"Starting yaml work export of files at #{start_time} ...",
-
"Generic work id: #{work.id}",
-
"Total file count: #{work.file_sets.count}")
-
total_byte_count = 0
-
if work.file_sets.count.positive?
-
work.file_sets.each do |file_set|
-
export_file_name = yaml_export_file_path( target_dirname: target_dirname, file_set: file_set )
-
write_file = if overwrite
-
true
-
else
-
!File.exist?( export_file_name )
-
end
-
file = file_from_file_set( file_set )
-
export_what = "#{export_file_name} (#{human_readable_size(file.size)} / #{file.size} bytes)"
-
if write_file
-
source_uri = file.uri.value
-
log_lines( log_file, "Starting file export of #{export_what} at #{Time.now}." )
-
bytes_copied = ExportFilesHelper.export_file_uri( source_uri: source_uri, target_file: export_file_name )
-
total_byte_count += bytes_copied
-
log_lines( log_file, "Finished file export of #{export_what} at #{Time.now}." )
-
else
-
log_lines( log_file, "Skipping file export of #{export_what} at #{Time.now}." )
-
end
-
end
-
end
-
end_time = Time.now
-
log_lines( log_file,
-
"Total bytes exported: #{total_byte_count} (#{human_readable_size(total_byte_count)})",
-
"... finished yaml generic work export of files at #{end_time}.")
-
rescue Exception => e # rubocop:disable Lint/RescueException
-
# rubocop:disable Rails/Output
-
puts "#{e.class}: #{e.message} at #{e.backtrace.join("\n")}"
-
# rubocop:enable Rails/Output
-
end
-
-
1
def self.yaml_work_find( curation_concern:, source: )
-
if source == SOURCE_DBDv2
-
DataSet.find curation_concern
-
else
-
GenericWork.find curation_concern
-
end
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
module OrderedStringHelper
-
-
1
class DeserializeError < RuntimeError
-
end
-
-
#
-
# convert a serialized array to a normal array of values
-
# assumes values are stored as json converted to strings
-
# a failure to deserialize throws a DeserializeError,
-
# the exact reason for failure is ignored
-
#
-
1
def self.deserialize( serialized_string_containing_an_array )
-
467
if serialized_string_containing_an_array.start_with?('[')
-
begin
-
467
arr = ActiveSupport::JSON.decode serialized_string_containing_an_array
-
467
return arr if arr.is_a?( Array )
-
rescue ActiveSupport::JSON.parse_error # rubocop:disable Lint/HandleExceptions
-
# ignore and fall through
-
end
-
end
-
raise OrderedStringHelper::DeserializeError
-
end
-
-
#
-
# serialize a normal array of values to an array of ordered values
-
#
-
1
def self.serialize( arr )
-
88
serialized_string_containing_an_array = ActiveSupport::JSON.encode( arr ).to_s
-
88
return serialized_string_containing_an_array
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  # Writes structured provenance events to the dedicated PROV_LOGGER,
  # optionally echoing each message to the Rails logger.
  module ProvenanceHelper

    extend JsonLoggerHelper
    extend JsonLoggerHelper::ClassMethods

    # Lazily cached config flag: echo provenance messages to Rails.logger?
    # rubocop:disable Style/ClassVars
    def self.echo_to_rails_logger
      @@echo_to_rails_logger ||= DeepBlueDocs::Application.config.provenance_log_echo_to_rails_logger
    end

    # Override the cached echo flag (used by tests / configuration).
    def self.echo_to_rails_logger=( echo_to_rails_logger )
      @@echo_to_rails_logger = echo_to_rails_logger
    end
    # rubocop:enable Style/ClassVars

    # Format a provenance event (via msg_to_log from JsonLoggerHelper) and
    # write it to the provenance log; extra keyword arguments are folded
    # into the logged payload.
    def self.log( class_name: 'UnknownClass',
                  event: 'unknown',
                  event_note: '',
                  id: 'unknown_id',
                  timestamp: timestamp_now,
                  time_zone: timestamp_zone,
                  echo_to_rails_logger: ProvenanceHelper.echo_to_rails_logger,
                  **log_key_values )

      msg = msg_to_log( class_name: class_name,
                        event: event,
                        event_note: event_note,
                        id: id,
                        timestamp: timestamp,
                        time_zone: time_zone,
                        **log_key_values )
      log_raw msg
      Rails.logger.info msg if echo_to_rails_logger
    end

    # Write a preformatted message straight to the provenance log.
    def self.log_raw( msg )
      PROV_LOGGER.info( msg )
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  require './lib/scheduler_logger'

  # Writes structured scheduler events to the dedicated SCHEDULER_LOGGER,
  # optionally echoing each message to the Rails logger.
  module SchedulerHelper

    extend JsonLoggerHelper
    extend JsonLoggerHelper::ClassMethods

    # Lazily cached config flag: echo scheduler messages to Rails.logger?
    # rubocop:disable Style/ClassVars
    def self.echo_to_rails_logger
      @@echo_to_rails_logger ||= DeepBlueDocs::Application.config.scheduler_log_echo_to_rails_logger
    end

    # Override the cached echo flag (used by tests / configuration).
    def self.echo_to_rails_logger=( echo_to_rails_logger )
      @@echo_to_rails_logger = echo_to_rails_logger
    end
    # rubocop:enable Style/ClassVars

    # Format a scheduler event and write it to the scheduler log; extra
    # keyword arguments are folded into the logged payload.
    # Unlike ProvenanceHelper.log, the time zone is not a parameter — it
    # always comes from LoggingHelper.timestamp_zone.
    def self.log( class_name: 'UnknownClass',
                  event: 'unknown',
                  event_note: '',
                  id: '',
                  timestamp: timestamp_now,
                  echo_to_rails_logger: SchedulerHelper.echo_to_rails_logger,
                  **log_key_values )

      msg = msg_to_log( class_name: class_name,
                        event: event,
                        event_note: event_note,
                        id: id,
                        timestamp: timestamp,
                        time_zone: LoggingHelper.timestamp_zone,
                        **log_key_values )
      log_raw msg
      Rails.logger.info msg if echo_to_rails_logger
    end

    # Write a preformatted message straight to the scheduler log.
    def self.log_raw( msg )
      SCHEDULER_LOGGER.info( msg )
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  require './lib/upload_logger'

  # Writes structured upload events to the dedicated UPLOAD_LOGGER and
  # provides small accessors for uploaded-file attributes.
  module UploadHelper

    extend JsonLoggerHelper
    extend JsonLoggerHelper::ClassMethods

    # Lazily cached config flag: echo upload messages to Rails.logger?
    # rubocop:disable Style/ClassVars
    def self.echo_to_rails_logger
      @@echo_to_rails_logger ||= DeepBlueDocs::Application.config.upload_log_echo_to_rails_logger
    end

    # Override the cached echo flag (used by tests / configuration).
    def self.echo_to_rails_logger=( echo_to_rails_logger )
      @@echo_to_rails_logger = echo_to_rails_logger
    end
    # rubocop:enable Style/ClassVars

    # Format an upload event and write it to the upload log; extra keyword
    # arguments are folded into the logged payload.
    def self.log( class_name: 'UnknownClass',
                  event: 'unknown',
                  event_note: '',
                  id: 'unknown_id',
                  timestamp: timestamp_now,
                  echo_to_rails_logger: UploadHelper.echo_to_rails_logger,
                  **log_key_values )

      msg = msg_to_log( class_name: class_name,
                        event: event,
                        event_note: event_note,
                        id: id,
                        timestamp: timestamp,
                        time_zone: LoggingHelper.timestamp_zone,
                        **log_key_values )
      log_raw msg
      Rails.logger.info msg if echo_to_rails_logger
    end

    # Write a preformatted message straight to the upload log.
    def self.log_raw( msg )
      UPLOAD_LOGGER.info( msg )
    end

    # The uploaded file's id, or nil when the object has no id method.
    def self.uploaded_file_id( uploaded_file )
      return nil unless uploaded_file.respond_to? :id
      uploaded_file.id
    end

    # The on-disk path of the uploaded file.
    def self.uploaded_file_path( uploaded_file )
      uploaded_file.file.path
    end

    # The on-disk size, in bytes, of the uploaded file.
    def self.uploaded_file_size( uploaded_file )
      File.size uploaded_file.file.path
    end

  end

end
-
# frozen_string_literal: true
-
-
1
# Miscellaneous view/formatting helpers: timestamp display, human-readable
# sizes, and user-agent based browser detection.
module DeepblueHelper

  # Format a timestamp as "YYYY-MM-DD HH:MM:SS", either converted to the
  # configured local time zone or annotated with its UTC offset.
  # Accepts a Time, a DateTime, or a parseable String.
  def self.display_timestamp( timestamp )
    timestamp = timestamp.to_datetime if timestamp.is_a? Time
    timestamp = DateTime.parse timestamp if timestamp.is_a? String
    if DeepBlueDocs::Application.config.datetime_stamp_display_local_time_zone
      timestamp = timestamp.new_offset( DeepBlueDocs::Application.config.timezone_offset )
      "#{timestamp.strftime("%Y-%m-%d %H:%M:%S")}"
    else
      "#{timestamp.strftime("%Y-%m-%d %H:%M:%S")} #{timestamp.formatted_offset(false, 'UTC')}"
    end
  end

  # Human-readable byte size, e.g. "1.5 MB".
  def self.human_readable_size( value, precision: 3 )
    ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value, precision: precision )
  end

  # The raw User-Agent header of the current request.
  # NOTE(review): may be nil when the header is absent — callers such as
  # users_browser call downcase on it without a guard; verify upstream.
  def user_agent()
    user_agent = request.env['HTTP_USER_AGENT']
    user_agent
  end

  # Best-effort browser identification from the User-Agent header,
  # memoized per helper instance. The branch order is load-bearing:
  # e.g. 'chrome/' must be tested before 'applewebkit/' (Chrome UAs
  # contain both), and the generic 'mozilla/' fallback must come last.
  def users_browser()
    user_agent = user_agent().downcase
    @users_browser ||= begin
      if user_agent.index('msie') && !user_agent.index('opera') && !user_agent.index('webtv')
        # 'ie'+user_agent[user_agent.index('msie')+5].chr
        'msie'
      elsif user_agent.index('gecko/')
        'gecko'
      elsif user_agent.index('opera')
        'opera'
      elsif user_agent.index('konqueror')
        'konqueror'
      elsif user_agent.index('ipod')
        'ipod'
      elsif user_agent.index('ipad')
        'ipad'
      elsif user_agent.index('iphone')
        'iphone'
      elsif user_agent.index('chrome/')
        'chrome'
      elsif user_agent.index('applewebkit/')
        'safari'
      elsif user_agent.index('googlebot/')
        'googlebot'
      elsif user_agent.index('msnbot')
        'msnbot'
      elsif user_agent.index('yahoo! slurp')
        'yahoobot'
      # NOTE(review): the two Windows-NT patterns below classify modern
      # Windows UAs as 'msie' — presumably intentional for legacy IE11
      # detection; confirm before relying on this value.
      elsif user_agent.index('mozilla/5.0 (windows nt 6.3; win64, x64')
        'msie'
      elsif user_agent.index('mozilla/5.0 (windows nt 10.0; win64; x64)')
        'msie'
      #Everything thinks it's mozilla, so this goes last
      elsif user_agent.index('mozilla/')
        'gecko'
      else
        'unknown'
      end
    end

    return @users_browser
  end

end
-
1
module Hyrax
  # View helpers for rendering and selecting document visibility.
  module AbilityHelper
    # Visibility choices as [label, value] pairs for a select control.
    # Starting from [public, authenticated, private]:
    # - :restrict deletes the first two and reverses, leaving [private]
    # - :loosen deletes the last two, leaving [public]
    # - any other variant keeps all three options
    def visibility_options(variant)
      options = [
        Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC,
        Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_AUTHENTICATED,
        Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
      ]
      case variant
      when :restrict
        options.delete_at(0)
        options.delete_at(0)
        options.reverse!
      when :loosen
        options.delete_at(1)
        options.delete_at(1)
      end
      options.map { |value| [visibility_text(value), value] }
    end

    # Rendered HTML badge for a visibility value.
    def visibility_badge(value)
      PermissionBadge.new(value).render
    end

    # Link from a document's visibility badge to its sharing tab.
    # Returns nil for admin sets (no visibility property).
    # NOTE(review): HyraxHelper later overrides this method to render only
    # the badge (no link) — see that definition before relying on this one.
    def render_visibility_link(document)
      # Admin Sets do not have a visibility property.
      return if document.respond_to?(:admin_set?) && document.admin_set?

      # Anchor must match with a tab in
      # https://github.com/samvera/hyrax/blob/master/app/views/hyrax/base/_guts4form.html.erb#L2
      path = if document.collection?
               hyrax.edit_dashboard_collection_path(document, anchor: 'share')
             else
               edit_polymorphic_path([main_app, document], anchor: 'share')
             end
      link_to(
        visibility_badge(document.visibility),
        path,
        id: "permission_#{document.id}",
        class: 'visibility-link'
      )
    end

    private

    # Human label for a visibility value; the authenticated value shows
    # the institution name instead of the generic translation.
    def visibility_text(value)
      return institution_name if value == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_AUTHENTICATED
      t("hyrax.visibility.#{value}.text")
    end
  end
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
-
1
module BrandingHelper
-
-
1
def branding_banner_file( id: )
-
# Find Banner filename
-
ci = CollectionBrandingInfo.where( collection_id: id, role: "banner" )
-
brand_path( collection_branding_info: ci[0] ) unless ci.empty?
-
end
-
-
1
def branding_logo_record( id: )
-
logo_info = []
-
# Find Logo filename, alttext, linktext
-
cis = CollectionBrandingInfo.where( collection_id: id, role: "logo" )
-
return if cis.empty?
-
cis.each do |coll_info|
-
logo_file = File.split(coll_info.local_path).last
-
file_location = brand_path( collection_branding_info: coll_info ) unless logo_file.empty?
-
alttext = coll_info.alt_text
-
linkurl = coll_info.target_url
-
logo_info << { file: logo_file, file_location: file_location, alttext: alttext, linkurl: linkurl }
-
end
-
logo_info
-
end
-
-
1
    # Convert a branding record's absolute local_path into a URL path
    # under the application's relative URL root, keeping the last four
    # path segments. Returns the record itself when local_path is blank.
    # NOTE(review): assumes local_path has at least four segments —
    # fewer would make the [-4..-1] slice nil and join would raise; verify
    # how local_path is populated.
    def brand_path( collection_branding_info: )
      rv = collection_branding_info
      local_path = collection_branding_info.local_path
      return rv if local_path.blank?
      # Keep <...>/collection_id/role/filename-style tail of the path.
      local_path_relative = local_path.split("/")[-4..-1].join('/')
      ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                             ::Deepblue::LoggingHelper.called_from,
                                             "collection_branding_info = #{collection_branding_info}",
                                             "local_path = #{local_path}",
                                             "local_path_relative=#{local_path_relative}",
                                             "" ]
      rv = ::DeepBlueDocs::Application.config.relative_url_root + "/" + local_path_relative
      ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                             ::Deepblue::LoggingHelper.called_from,
                                             "rv = #{rv}",
                                             "" ]
      return rv
    end
-
-
1
    # Memoized banner info hash for the given collection id:
    # { file:, full_path:, relative_path: }. All values are nil when the
    # collection has no banner branding record.
    def branding_banner_info( id: )
      @banner_info ||= begin
        # Find Banner filename
        banner_info = collection_banner_info( id: id )
        banner_file = File.split(banner_info.first.local_path).last unless banner_info.empty?
        file_location = banner_info.first.local_path unless banner_info.empty?
        relative_path = brand_path( collection_branding_info: banner_info.first ) unless banner_info.empty?
        { file: banner_file, full_path: file_location, relative_path: relative_path }
      end
    end
-
-
1
    # Memoized list of logo info hashes for the given collection id:
    # { file:, full_path:, relative_path:, alttext:, linkurl: } per logo.
    # Empty array when the collection has no logo branding records.
    def branding_logo_info( id: )
      @logo_info ||= begin
        # Find Logo filename, alttext, linktext
        logos_info = collection_logo_info( id: id )
        logos_info.map do |logo_info|
          logo_file = File.split(logo_info.local_path).last
          relative_path = brand_path( collection_branding_info: logo_info ) unless logo_file.empty?
          alttext = logo_info.alt_text
          linkurl = logo_info.target_url
          { file: logo_file, full_path: logo_info.local_path, relative_path: relative_path, alttext: alttext, linkurl: linkurl }
        end
      end
    end
-
-
1
def collection_banner_info( id: )
-
CollectionBrandingInfo.where( collection_id: id.to_s ).where( role: "banner" )
-
end
-
-
1
def collection_logo_info( id: )
-
CollectionBrandingInfo.where( collection_id: id.to_s ).where( role: "logo" )
-
end
-
-
1
def branding_file_save( collection_branding_info:, file_location:, copy_file: true )
-
local_dir = branding_file_find_local_dir_name( collection_id: collection_id, role: role )
-
FileUtils.mkdir_p local_dir
-
local_path = collection_branding_info.local_path
-
FileUtils.cp file_location, local_path unless file_location == local_path || !copy_file
-
FileUtils.remove_file(file_location) if File.exist?(file_location) && copy_file
-
super()
-
end
-
-
1
def branding_file_delete( location_path: )
-
FileUtils.remove_file( location_path ) if File.exist?( location_path )
-
end
-
-
1
def branding_file_find_local_filename( collection_id:, role:, filename: )
-
local_dir = branding_file_find_local_dir_name( collection_id: collection_id, role: role )
-
File.join(local_dir, filename)
-
end
-
-
1
def branding_file_find_local_dir_name( collection_id:, role: )
-
File.join( Hyrax.config.branding_path, collection_id.to_s, role.to_s)
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
-
1
module EmbargoHelper
-
-
1
def asset_embargo_release_date( asset: )
-
rv = "#{asset.embargo_release_date} #{Time.zone}"
-
DateTime.parse rv
-
end
-
-
1
def assets_with_expired_embargoes
-
3
::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
-
::Deepblue::LoggingHelper.called_from,
-
"" ]
-
3
@assets_with_expired_embargoes ||= EmbargoService.assets_with_expired_embargoes
-
end
-
-
1
def assets_under_embargo
-
::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
-
::Deepblue::LoggingHelper.called_from,
-
"" ]
-
@assets_under_embargo ||= EmbargoService.assets_under_embargo
-
end
-
-
1
def assets_with_deactivated_embargoes
-
::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
-
::Deepblue::LoggingHelper.called_from,
-
"" ]
-
@assets_with_deactivated_embargoes ||= EmbargoService.assets_with_deactivated_embargoes
-
end
-
-
1
    # Email the depositor (and, when configured, the RDS notification
    # address) that an asset's embargo expires in +expiration_days+ days.
    # In test_mode everything is logged but no email is sent.
    #
    # @param asset [#embargo_release_date, #id] the embargoed asset
    # @param expiration_days [Integer] days until the embargo expires
    # @param email_owner [Boolean] currently unused in this body — the
    #   depositor email is always composed; TODO confirm intent
    # @param test_mode [Boolean] log only, do not send
    # @param verbose [Boolean] extra debug logging
    def about_to_expire_embargo_email( asset:, expiration_days:, email_owner: true, test_mode: false, verbose: false )
      ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                             ::Deepblue::LoggingHelper.called_from,
                                             ::Deepblue::LoggingHelper.obj_class( "asset", asset ),
                                             "asset=#{asset}",
                                             "expiration_days=#{expiration_days}",
                                             "email_owner=#{email_owner}",
                                             "test_mode=#{test_mode}",
                                             "verbose=#{verbose}",
                                             "" ]
      embargo_release_date = asset.embargo_release_date
      # Reload the full curation concern; +asset+ may be a solr-backed presenter.
      curation_concern = ::ActiveFedora::Base.find asset.id
      id = curation_concern.id
      title = curation_concern.title.join
      subject = ::Deepblue::EmailHelper.t( "hyrax.email.about_to_expire_embargo.subject", expiration_days: expiration_days, title: title )
      visibility = visibility_on_embargo_deactivation( curation_concern: curation_concern )
      url = ::Deepblue::EmailHelper.curation_concern_url( curation_concern: curation_concern )
      email = curation_concern.authoremail
      # NOTE(review): "exipration_days" typo below is in the emitted debug
      # string; left untouched here since this edit changes comments only.
      ::Deepblue::LoggingHelper.debug "about_to_expire_embargo_email: curation concern id: #{id} email: #{email} exipration_days: #{expiration_days}" if verbose
      body = []
      body << ::Deepblue::EmailHelper.t( "hyrax.email.about_to_expire_embargo.for",
                                         expiration_days: expiration_days,
                                         embargo_release_date: embargo_release_date,
                                         title: title,
                                         id: id )
      body << ::Deepblue::EmailHelper.t( "hyrax.email.about_to_expire_embargo.visibility", visibility: visibility )
      body << ::Deepblue::EmailHelper.t( "hyrax.email.about_to_expire_embargo.visit", url: url )
      body = body.join( '' )
      event_note = "#{expiration_days} days"
      event_note += " test_mode" if test_mode
      # First notification: the depositor.
      ::Deepblue::EmailHelper.log( class_name: self.class.name,
                                   current_user: nil,
                                   event: "Embargo expiration notification",
                                   event_note: event_note,
                                   id: id,
                                   to: email,
                                   from: email,
                                   subject: subject,
                                   body: body )
      ::Deepblue::EmailHelper.send_email( to: email, from: email, subject: subject, body: body ) unless test_mode
      # Second notification: the RDS notification address, when configured.
      return unless DeepBlueDocs::Application.config.embargo_about_to_expire_email_rds
      email = ::Deepblue::EmailHelper.notification_email
      ::Deepblue::EmailHelper.log( class_name: self.class.name,
                                   current_user: nil,
                                   event: "Embargo expiration notification",
                                   event_note: event_note,
                                   id: id,
                                   to: email,
                                   from: email,
                                   subject: subject,
                                   body: body )
      ::Deepblue::EmailHelper.send_email( to: email, from: email, subject: subject, body: body ) unless test_mode
    end
-
-
1
def days_to_embargo_release_date( now: DateTime.now, embargo_release_date: )
-
embargo_release_date = DateTime.parse "#{embargo_release_date} #{Time.zone}" if embargo_release_date.is_a? String
-
((embargo_release_date - @start_of_day).to_f + 0.5).to_i
-
end
-
-
# Update the visibility of the work to match the correct state of the embargo, then clear the embargo date, etc.
-
# Saves the embargo and the work
-
1
    # Update the visibility of the work to match the correct state of the embargo, then clear the embargo date, etc.
    # Saves the embargo and the work
    #
    # FileSet: sets post-embargo visibility, records provenance, saves.
    # Work: lapses the embargo, deactivates it, saves embargo and work,
    # optionally copies visibility to files and emails the owner.
    # In test_mode nothing is persisted.
    #
    # @return [Boolean, Object, nil] for a work, the save! result (false in
    #   test mode); for a FileSet the last expression's value — not a
    #   meaningful flag. TODO confirm callers ignore the FileSet return.
    def deactivate_embargo( curation_concern:,
                            copy_visibility_to_files:,
                            current_user:,
                            email_owner: true,
                            test_mode: false,
                            verbose: false )
      ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                             ::Deepblue::LoggingHelper.called_from,
                                             ::Deepblue::LoggingHelper.obj_class( "curation_concern", curation_concern ),
                                             "copy_visibility_to_files=#{copy_visibility_to_files}",
                                             "email_owner=#{email_owner}",
                                             "test_mode=#{test_mode}",
                                             "verbose=#{verbose}",
                                             "" ]
      # also probably want to lock the model
      # Fall back to the system user when no current user was supplied.
      current_user = Deepblue::ProvenanceHelper.system_as_current_user unless current_user.present?
      embargo_visibility = curation_concern.visibility
      if curation_concern.is_a? FileSet
        ::Deepblue::LoggingHelper.debug "deactivate_embargo for file_set: curation concern id: #{curation_concern.id}" if verbose
        curation_concern.visibility = visibility_on_embargo_deactivation( curation_concern: curation_concern )
        curation_concern.provenance_unembargo( current_user: Deepblue::ProvenanceHelper.system_as_current_user,
                                               embargo_visibility: embargo_visibility,
                                               embargo_visibility_after: curation_concern.visibility )
        curation_concern.save! unless test_mode
      else
        curation_concern.embargo_visibility! unless test_mode # If the embargo has lapsed, update the current visibility.
        curation_concern.deactivate_embargo!( current_user: Deepblue::ProvenanceHelper.system_as_current_user ) unless test_mode
        curation_concern.embargo.save! unless test_mode
        rv = false
        rv = curation_concern.save! unless test_mode
        curation_concern.copy_visibility_to_files if copy_visibility_to_files && !test_mode
        deactivate_embargo_email( curation_concern: curation_concern, test_mode: test_mode ) if email_owner
        rv
      end
    end
-
-
1
    # Email the depositor (and, when configured, the RDS notification
    # address) that a work's embargo has been deactivated. In test_mode
    # everything is logged but no email is sent.
    def deactivate_embargo_email( curation_concern:, test_mode:, verbose: false )
      id = curation_concern.id
      title = curation_concern.title.join
      subject = ::Deepblue::EmailHelper.t( "hyrax.email.deactivate_embargo.subject", title: title )
      url = ::Deepblue::EmailHelper.curation_concern_url( curation_concern: curation_concern )
      body = []
      body << ::Deepblue::EmailHelper.t( "hyrax.email.deactivate_embargo.for",
                                         title: title,
                                         id: id,
                                         visibility: curation_concern.visibility )
      body << ::Deepblue::EmailHelper.t( "hyrax.email.deactivate_embargo.visit", url: url )
      body = body.join( '' )
      event_note = ''
      event_note = "test_mode" if test_mode
      # First notification: the depositor.
      email = curation_concern.authoremail
      ::Deepblue::LoggingHelper.debug "deactivate_embargo_email: curation concern id: #{id} email: #{email}" if verbose
      ::Deepblue::EmailHelper.log( class_name: self.class.name,
                                   current_user: nil,
                                   event: "Deactivate embargo",
                                   event_note: event_note,
                                   id: id,
                                   to: email,
                                   from: email,
                                   subject: subject,
                                   body: body )
      ::Deepblue::EmailHelper.send_email( to: email, from: email, subject: subject, body: body ) unless test_mode
      # Second notification: the RDS notification address, when configured.
      return unless DeepBlueDocs::Application.config.embargo_deactivate_email_rds
      email = ::Deepblue::EmailHelper.notification_email
      ::Deepblue::LoggingHelper.debug "deactivate_embargo_email: curation concern id: #{id} email: #{email}" if verbose
      ::Deepblue::EmailHelper.log( class_name: self.class.name,
                                   current_user: nil,
                                   event: "Deactivate embargo",
                                   event_note: event_note,
                                   id: id,
                                   to: email,
                                   from: email,
                                   subject: subject,
                                   body: body )
      ::Deepblue::EmailHelper.send_email( to: email, from: email, subject: subject, body: body ) unless test_mode
    end
-
-
1
def embargo_added( curation_concern:, update_attr_key_values: )
-
::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
-
::Deepblue::LoggingHelper.called_from,
-
::Deepblue::LoggingHelper.obj_class( "curation_concern", curation_concern ),
-
"curation_concern.id=#{curation_concern.id}",
-
"update_attr_key_values=#{update_attr_key_values}",
-
"" ]
-
false
-
end
-
-
1
def have_assets_under_embargo?( current_user_key )
-
embargoes = my_assets_under_embargo( current_user_key )
-
return false if embargoes.blank?
-
hide_files = DeepBlueDocs::Application.config.embargo_manage_hide_files
-
return true unless hide_files
-
embargoes.each do |curation_concern|
-
hrt = curation_concern.human_readable_type
-
return true if hrt != 'File'
-
end
-
return false
-
end
-
-
1
def my_assets_with_expired_embargoes( current_user_key )
-
@my_assets_with_expired_embargoes ||= EmbargoService.my_assets_with_expired_embargoes( current_user_key )
-
end
-
-
1
def my_assets_under_embargo( current_user_key )
-
@my_assets_under_embargo ||= EmbargoService.my_assets_under_embargo( current_user_key )
-
end
-
-
1
def my_assets_with_deactivated_embargoes( current_user_key )
-
@my_assets_with_deactivated_embargoes ||= EmbargoService.my_assets_with_deactivated_embargoes( current_user_key )
-
end
-
-
1
    # Placeholder for a pre-deactivation warning email to the depositor;
    # intentionally a no-op until implemented.
    def warn_deactivate_embargo_email( curation_concern:, days: )
      # TODO
    end
-
-
1
def visibility_on_embargo_deactivation( curation_concern: )
-
curation_concern.to_solr["visibility_after_embargo_ssim"]
-
end
-
-
end
-
-
end
-
1
# Application-wide Hyrax view helper overrides.
module HyraxHelper
  include ::BlacklightHelper
  include Hyrax::BlacklightOverride
  include Hyrax::HyraxHelperBehavior

  # override hyrax method
  # Which translations are available for the user to select
  # @return [Hash{String => String}] locale abbreviations as keys and flags as values
  def available_translations
    {
      'en' => 'English'
    }
  end

  # @param [Hash] options from blacklight invocation of helper_method
  # @see #index_field_link params
  # @return [String]
  def human_readable_file_size(options)
    value = options[:value].first
    ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value, precision: 3 )
  end

  # def link_to_profile(login)
  #   user = ::User.find_by_user_key(login)
  #   return login if user.nil?
  #
  #   text = if user.respond_to? :name
  #            user.name
  #          else
  #            login
  #          end
  #
  #   href = profile_path(user)
  #
  #   # TODO: ?? still needed ?? Fix the link to the user profiles when the sufia object isn't available.
  #   link_to text, href
  # end

  # Substitute a space for nil/empty values so table cells render.
  # NOTE(review): the literal below reads as a plain space in this copy —
  # it may originally have been the "&nbsp;" HTML entity, garbled by
  # extraction; confirm against the pristine source.
  def self.nbsp_or_value( value )
    return " " if value.nil?
    return " " if value.to_s.empty?
    return value
  end

  # Overrides AbilityHelper.render_visibility_link to fix bug reported in
  # UMRDR issue 727: Link provided by render_visibility_link method had
  # path that displays a form to edit all attributes for a document. New
  # method simply renders the visibility_badge for the document.
  def render_visibility_link(document)
    visibility_badge(document.visibility)
  end

  # A Blacklight index field helper_method
  # @param [Hash] options from blacklight helper_method invocation. Maps rights statement URIs to links with labels.
  # @return [ActiveSupport::SafeBuffer] rights statement links, html_safe
  def rights_license_links(options)
    service = Hyrax::RightsLicenseService.new
    to_sentence(options[:value].map { |right| link_to service.label(right), right })
  end

  # I18n translate with dots in string scope segments replaced by
  # underscores, so URI-like scopes don't split into nested keys.
  def t_uri(key, scope: [])
    new_scope = scope.collect do |arg|
      if arg.is_a?(String)
        arg.tr('.', '_')
      else
        arg
      end
    end
    I18n.t(key, scope: new_scope)
  end
end
-
# frozen_string_literal: true
-
-
1
# Shared helper for scheduler jobs: read configuration values out of the
# job options hash with a fallback default.
module JobHelper

  # Fetch +key+ from +options+, returning +default_value+ when the options
  # are blank or the key is absent.
  #
  # @param options [Hash, nil] job arguments
  # @param key [Object] options key to read
  # @param default_value [Object] value returned when the key is unavailable
  # @param verbose [Boolean] when true, log the value that was found
  # @return [Object] the option value or the default
  def job_options_value(options, key:, default_value: nil, verbose: false )
    return default_value if options.blank?
    return default_value unless options.key? key
    value = options[key]
    ::Deepblue::LoggingHelper.debug "set key #{key} to #{value}" if verbose
    value
  end

end
-
# frozen_string_literal: true
-
-
1
# Formatting helpers used when composing notification messages about a
# curation concern (work).
module MsgHelper
  extend ActionView::Helpers::TranslationHelper

  # Separator used when joining multi-valued metadata fields.
  FIELD_SEP = '; '

  class << self

    # Joined creator list of the work.
    def creator( curation_concern, field_sep: FIELD_SEP )
      curation_concern.creator.join( field_sep )
    end

    # Joined description of the work.
    def description( curation_concern, field_sep: FIELD_SEP )
      curation_concern.description.join( field_sep )
    end

    # External Globus download URL for the work.
    def globus_link( curation_concern )
      ::GlobusJob.external_url curation_concern.id
    end

    # Joined publisher list of the work.
    def publisher( curation_concern, field_sep: FIELD_SEP )
      curation_concern.publisher.join( field_sep )
    end

    # Joined subject discipline list of the work.
    def subject_discipline( curation_concern, field_sep: FIELD_SEP )
      curation_concern.subject_discipline.join( field_sep )
    end

    # Joined title of the work.
    def title( curation_concern, field_sep: FIELD_SEP )
      curation_concern.title.join( field_sep )
    end

    # Placeholder description of where the work lives; real URL generation
    # is still unimplemented (raises NoMethodError if called without a
    # curation_concern despite the nil default).
    def work_location( curation_concern: nil )
      "work location for: #{curation_concern.class.name} #{curation_concern.id}"
    end

  end
end
-
# frozen_string_literal: true
-
-
1
# Solr indexer for DataSet works: standard Hyrax metadata plus a handful
# of locally-defined fields (ordered creator/title, doi, tombstone, size).
class DataSetIndexer < Hyrax::WorkIndexer
  # Indexes the default (basic) Hyrax metadata.
  include Hyrax::IndexesBasicMetadata

  # Fetches remote labels for based_near while indexing.
  include Hyrax::IndexesLinkedMetadata

  def generate_solr_document
    super.tap do |doc|
      doc[Solrizer.solr_name('creator_ordered', :stored_searchable)] = object.creator_ordered
      doc[Solrizer.solr_name('doi', :symbol)] = object.doi

      referenced = Array( object.referenced_by ).join( " " )
      doc[Solrizer.solr_name('referenced_by', :stored_searchable)] = referenced

      # Title is flattened to a single string so it can also be sorted on.
      title_text = Array( object.title ).join( " " )
      doc[Solrizer.solr_name('title', :stored_searchable)] = title_text
      doc[Solrizer.solr_name('title', :stored_sortable)] = title_text

      doc[Solrizer.solr_name('title_ordered', :stored_searchable)] = object.title_ordered
      doc[Solrizer.solr_name('tombstone', :symbol)] = object.tombstone
      doc[Solrizer.solr_name('total_file_size', Hyrax::FileSetIndexer::STORED_LONG)] = object.size_of_work
    end
  end
end
-
# Generated via
-
# `rails generate hyrax:work Dissertation`
-
1
# Solr indexer for Dissertation works. No custom fields: it relies
# entirely on the included Hyrax indexing behaviors. Override
# #generate_solr_document here if custom solr fields are ever needed.
class DissertationIndexer < Hyrax::WorkIndexer
  # Indexes the default (basic) Hyrax metadata.
  include Hyrax::IndexesBasicMetadata

  # Fetches remote labels for based_near while indexing.
  include Hyrax::IndexesLinkedMetadata
end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
# Solr indexer for GenericWork works. No custom fields: it relies
# entirely on the included Hyrax indexing behaviors. Override
# #generate_solr_document here if custom solr fields are ever needed.
class GenericWorkIndexer < Hyrax::WorkIndexer
  # Indexes the default (basic) Hyrax metadata.
  include Hyrax::IndexesBasicMetadata

  # Fetches remote labels for based_near while indexing.
  include Hyrax::IndexesLinkedMetadata
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
  # A collection indexer that also indexes the basic metadata fields and
  # the locally-defined ordered creator list.
  class CollectionWithBasicMetadataIndexer < CollectionIndexer
    include Hyrax::IndexesBasicMetadata

    def generate_solr_document
      super.tap do |doc|
        doc[Solrizer.solr_name('creator_ordered', :stored_searchable)] = object.creator_ordered
      end
    end
  end
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
  # Monkey-patch replacement of Hyrax's FileSetIndexer: indexes file-level
  # technical metadata plus local parent-linkage fields.
  class FileSetIndexer < ActiveFedora::IndexingService
    include Hyrax::IndexesThumbnails
    include Hyrax::IndexesBasicMetadata

    # Solr descriptor for a stored long field (used for file sizes).
    STORED_LONG = Solrizer::Descriptor.new(:long, :stored)

    def generate_solr_document
      super.tap do |doc|
        doc['hasRelatedMediaFragment_ssim'] = object.representative_id
        doc['hasRelatedImage_ssim'] = object.thumbnail_id
        # Label is the actual file name; it is not editable by the user.
        doc['label_tesim'] = object.label
        doc['label_ssi'] = object.label
        format = file_format
        doc['file_format_tesim'] = format
        doc['file_format_sim'] = format
        doc['file_size_lts'] = object.file_size[0]
        doc['all_text_timv'] = object.extracted_text.content if object.extracted_text.present?
        doc['height_is'] = Integer(object.height.first) if object.height.present?
        doc['width_is'] = Integer(object.width.first) if object.width.present?
        doc['visibility_ssi'] = object.visibility
        doc['mime_type_ssi'] = object.mime_type
        # Index the Fedora-generated SHA1 digest to create a linkage between
        # files on disk (in fcrepo.binary-store-path) and repository objects.
        doc['digest_ssim'] = digest_from_content
        doc['page_count_tesim'] = object.page_count
        doc['file_title_tesim'] = object.file_title
        doc['duration_tesim'] = object.duration
        doc['sample_rate_tesim'] = object.sample_rate
        doc['original_checksum_tesim'] = object.original_checksum

        parent = object.parent
        if parent.nil?
          doc['parented_bsi'] = false
        else
          doc['parented_bsi'] = true
          doc['is_child_of_ssi'] = parent.id
          doc['parent_path_tesi'] = Rails.application.routes.url_helpers.polymorphic_path(parent)
        end
      end
    end

    private

    # SHA1 digest string of the original file, or nil when there is none.
    def digest_from_content
      original = object.original_file
      return nil unless original
      original.digest.first.to_s
    end

    # Human readable file format: mime subtype, format label(s), or both.
    def file_format
      mime = object.mime_type
      labels = object.format_label
      if mime.present?
        subtype = mime.split('/').last
        labels.present? ? "#{subtype} (#{labels.join(', ')})" : subtype
      elsif labels.present?
        labels
      end
    end
  end
end
-
# frozen_string_literal: true
-
-
1
require_relative '../services/deepblue/about_to_expire_embargoes_service'

# Scheduler job: delegate to AboutToExpireEmbargoesService, which handles
# embargoes nearing expiration (e.g. notifying owners ahead of time).
#
# Options recognized (read via JobHelper#job_options_value):
#   'verbose', 'email_owner', 'expiration_lead_days', 'skip_file_sets',
#   'test_mode'
class AboutToExpireEmbargoesJob < ::Hyrax::ApplicationJob
  include JobHelper
  queue_as :scheduler

  def perform( *args )
    ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           Deepblue::LoggingHelper.called_from,
                                           Deepblue::LoggingHelper.obj_class( 'class', self ),
                                           "args=#{args}",
                                           Deepblue::LoggingHelper.obj_class( 'args', args ),
                                           "" ]
    ::Deepblue::SchedulerHelper.log( class_name: self.class.name, event: "about_to_expire_embargoes" )
    # NOTE(review): assumes each element of args destructures into a
    # key/value pair (i.e. the scheduler passes pairs, not a single Hash)
    # — confirm against the scheduler invocation.
    options = {}
    args.each { |key,value| options[key] = value }
    ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           "options=#{options}",
                                           Deepblue::LoggingHelper.obj_class( 'options', options ),
                                           "" ]
    verbose = job_options_value(options, key: 'verbose', default_value: false )
    ::Deepblue::LoggingHelper.debug "verbose=#{verbose}" if verbose
    email_owner = job_options_value(options, key: 'email_owner', default_value: true )
    ::Deepblue::LoggingHelper.debug "email_owner=#{email_owner}" if verbose
    # No default: nil lets the service fall back to its own lead time.
    expiration_lead_days = job_options_value(options, key: 'expiration_lead_days' )
    ::Deepblue::LoggingHelper.debug "expiration_lead_days=#{expiration_lead_days}" if verbose
    skip_file_sets = job_options_value(options, key: 'skip_file_sets', default_value: true )
    ::Deepblue::LoggingHelper.debug "@skip_file_sets=#{skip_file_sets}" if verbose
    test_mode = job_options_value(options, key: 'test_mode', default_value: false )
    ::Deepblue::LoggingHelper.debug "test_mode=#{test_mode}" if verbose
    ::Deepblue::AboutToExpireEmbargoesService.new( email_owner: email_owner,
                                                   expiration_lead_days: expiration_lead_days,
                                                   skip_file_sets: skip_file_sets,
                                                   test_mode: test_mode,
                                                   verbose: verbose ).run
  # Deliberate log-and-reraise: broad rescue so failures are recorded in
  # the Rails log before the job framework sees them; the raise preserves
  # the job's failure semantics.
  rescue Exception => e # rubocop:disable Lint/RescueException
    Rails.logger.error "#{e.class} #{e.message} at #{e.backtrace[0]}"
    Rails.logger.error e.backtrace.join("\n")
    raise e
  end

end
-
# frozen_string_literal: true
-
-
1
# Copies a work's files (plus a generated metadata report) into the Globus
# download area: files are staged in a tmp prep dir, moved into the prep
# dir, then the whole prep dir is moved into the download dir. Interested
# users' email addresses accumulate in a shared file and are all notified
# when the copy completes.
class GlobusCopyJob < GlobusJob
  queue_as :globus_copy

  # @param concern_id [String] id of the work to copy
  # @param log_prefix [String] prefix for log lines
  # @param generate_error [Boolean] test hook: raise after the first file
  # @param delay_per_file_seconds [Integer] test hook: sleep between files
  # @param user_email [String, nil] requester to notify on completion
  def perform( concern_id, log_prefix: "Globus: ", generate_error: false, delay_per_file_seconds: 0, user_email: nil )
    globus_job_perform( concern_id: concern_id, email: user_email, log_prefix: "#{log_prefix}globus_copy_job" ) do
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} begin copy" unless @globus_job_quiet
      @target_download_dir = target_download_dir2 @globus_concern_id
      @target_prep_dir = target_prep_dir2( @globus_concern_id, prefix: nil, mkdir: true )
      @target_prep_dir_tmp = target_prep_tmp_dir2( @globus_concern_id, prefix: nil, mkdir: true )
      curation_concern = ActiveFedora::Base.find @globus_concern_id
      globus_email_rds( curation_concern: curation_concern, description: "copy job started for work #{curation_concern.id}" )
      # Metadata report is written to the tmp dir, then promoted to prep.
      metadata_file = curation_concern.metadata_report( dir: @target_prep_dir_tmp, filename_pre: 'w_' )
      move_destination = GlobusJob.target_file_name( @target_prep_dir, metadata_file.basename )
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} mv #{metadata_file} to #{move_destination}" unless @globus_job_quiet
      FileUtils.move( metadata_file, move_destination )
      file_sets = curation_concern.file_sets
      # Skip files already promoted to the prep dir (supports restarts).
      do_copy_predicate = ->(target_file_name, _target_file) { globus_do_copy?( target_file_name ) }
      Deepblue::ExportFilesHelper.export_file_sets( target_dir: @target_prep_dir_tmp,
                                                    file_sets: file_sets,
                                                    log_prefix: @globus_log_prefix,
                                                    do_export_predicate: do_copy_predicate ) do |target_file_name, target_file|
        sleep delay_per_file_seconds if delay_per_file_seconds.positive?
        move_destination = GlobusJob.target_file_name( @target_prep_dir, target_file_name )
        Deepblue::LoggingHelper.debug "#{@globus_log_prefix} mv #{target_file} to #{move_destination}" unless @globus_job_quiet
        FileUtils.chmod( @@globus_copy_file_permissions, target_file )
        FileUtils.chown( nil, @@globus_copy_file_group, target_file ) unless @@globus_copy_file_group.blank?
        FileUtils.move( target_file, move_destination )
        if generate_error
          # Clearing the lock-file ivar keeps the stale lock on disk so the
          # error state is observable; then fail deliberately.
          @globus_lock_file = nil
          raise StandardError, "generated error"
        end
      end
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} mv #{@target_prep_dir} to #{@target_download_dir}" unless @globus_job_quiet
      FileUtils.move( @target_prep_dir, @target_download_dir )
      FileUtils.rmdir @target_prep_dir_tmp
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} copy complete" unless @globus_job_quiet
      begin
        globus_copy_job_email_add( user_email )
        globus_copy_job_email_add( Deepblue::EmailHelper.notification_email )
        @email_lines = globus_copy_job_complete_lines( curation_concern )
        globus_copy_job_email_all
        globus_copy_job_email_clean
      rescue Exception => e # rubocop:disable Lint/RescueException
        # Email delivery failures must not fail an otherwise-complete copy.
        msg = "#{@globus_log_prefix} #{e.class}: #{e.message} at #{e.backtrace.join("\n")}"
        Rails.logger.error msg
      end
    end
  end

  # Append +email+ (if any) to the shared notification file under an
  # advisory file lock; concurrent requesters all end up in the file.
  def globus_copy_job_email_add( email = nil )
    email_file = globus_copy_job_email_file
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_email_add #{email} to #{email_file}" unless @globus_job_quiet
    open( email_file, 'a' ) do |file|
      globus_file_lock( file ) do |out|
        out << if email.nil?
                 ''
               else
                 "#{email}\n"
               end
      end
    end
  end

  protected

  # NOTE(review): this assigns a class-level instance variable, distinct
  # from the per-job @email_lines set in #perform; it is never read and is
  # kept only for fidelity with the original.
  @email_lines = []

  # Body lines for the "files available" notification email.
  def globus_copy_job_complete_lines( curation_concern )
    lines = []
    lines << "Globus download is now available."
    lines << "Work: #{MsgHelper.title(curation_concern)}"
    lines << "At: #{MsgHelper.work_location(curation_concern: curation_concern)}"
    lines << "By: #{MsgHelper.creator(curation_concern)}"
    lines << "Deposited by: #{curation_concern.depositor}"
    lines << "Globus link: #{MsgHelper.globus_link(curation_concern)}"
    return lines
  rescue Exception => e # rubocop:disable Lint/RescueException
    # NOTE(review): on error this returns the logger call's result, not an
    # Array — callers guard with blank? checks downstream; confirm.
    msg = "#{@globus_log_prefix} #{e.class}: #{e.message} at #{e.backtrace.join("\n")}"
    Rails.logger.error msg
  end

  # Send the completion email to a single user.
  def globus_copy_job_email_user( email: nil, lines: [] )
    return if email.blank?
    return if lines.blank?
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_email_user: work id: #{@globus_concern_id} email: #{email}" unless @globus_job_quiet
    body = lines.join( "\n" )
    to = email
    from = email
    subject = 'DBD: Globus Work Files Available'
    Deepblue::EmailHelper.log( class_name: 'GlobusCopyJob',
                               current_user: nil,
                               event: Deepblue::AbstractEventBehavior::EVENT_GLOBUS,
                               event_note: 'files available',
                               id: @globus_concern_id,
                               to: to,
                               from: from,
                               subject: subject,
                               body: lines )
    Deepblue::EmailHelper.send_email( to: to, from: from, subject: subject, body: body )
  end

  # Send the completion email to every accumulated address.
  def globus_copy_job_email_all( emails: nil, lines: [] )
    emails = globus_copy_job_emails if emails.blank?
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_email_all emails=#{emails}" unless @globus_job_quiet
    return if emails.count.zero?
    lines = @email_lines if lines.blank?
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_email_all lines=#{lines}" unless @globus_job_quiet
    emails.each { |email| globus_copy_job_email_user( email: email, lines: lines ) }
  rescue Exception => e # rubocop:disable Lint/RescueException
    msg = "#{@globus_log_prefix} #{e.class}: #{e.message} at #{e.backtrace.join("\n")}"
    Rails.logger.error msg
  end

  # Unique email addresses read from the shared notification file
  # (one per line), under a shared file lock.
  def globus_copy_job_emails
    email_addresses = {}
    email_file = globus_copy_job_email_file
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_emails email_file=#{email_file}" unless @globus_job_quiet
    if File.exist? email_file
      open( email_file, 'r' ) do |file|
        globus_file_lock( file, mode: File::LOCK_SH ) do |fin|
          until fin.eof?
            line = fin.readline
            line = line.chomp!
            Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_emails line=#{line}" unless @globus_job_quiet
            email_addresses[line] = true unless line.empty?
          end
        end
      end
    end
    return email_addresses.keys
  end

  # False when the file has already been promoted to the prep dir
  # (so restarted jobs do not re-export it).
  def globus_do_copy?( target_file_name )
    prep_file_name = GlobusJob.target_file_name( @target_prep_dir, target_file_name )
    do_copy = true
    if File.exist? prep_file_name
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} skipping copy because #{prep_file_name} already exists" unless @globus_job_quiet
      do_copy = false
    end
    do_copy
  end

  def globus_job_complete?
    globus_copy_job_complete? @globus_concern_id
  end

  def globus_job_complete_file
    globus_ready_file
  end

  # Overrides GlobusJob#globus_job_perform_in_progress so that a user who
  # requests an already-running copy is still notified when it completes.
  # FIX: the original called `super.globus_job_perform_in_progress(...)`,
  # which invoked the parent method (bare `super` forwards the arguments)
  # and then called the method again on its nil return value, raising
  # NoMethodError. A plain `super` call is what was intended.
  def globus_job_perform_in_progress( email: nil )
    globus_copy_job_email_add( email )
    super( email: email )
  end

end
-
# frozen_string_literal: true
-
-
1
# Base class for Globus-related background jobs. Coordinates work through
# files on disk: a lock file (containing the current "era" token), an
# error file, and a "ready"/complete file, all under the configured prep
# directory. Finished payloads live under the download directory.
#
# NOTE(review): class variables (@@) are shared with subclasses — e.g.
# GlobusCopyJob reads @@globus_copy_file_permissions — so converting them
# to class-instance variables would be a breaking change here.
class GlobusJob < ::Hyrax::ApplicationJob

  @@globus_era_timestamp = DeepBlueDocs::Application.config.globus_era_timestamp
  @@globus_era_token = DeepBlueDocs::Application.config.globus_era_token.freeze

  @@globus_enabled = DeepBlueDocs::Application.config.globus_enabled.freeze
  @@globus_base_file_name = DeepBlueDocs::Application.config.base_file_name.freeze
  @@globus_base_url = DeepBlueDocs::Application.config.globus_base_url.freeze
  @@globus_download_dir = DeepBlueDocs::Application.config.globus_download_dir.freeze
  @@globus_prep_dir = DeepBlueDocs::Application.config.globus_prep_dir.freeze

  @@globus_copy_file_group = DeepBlueDocs::Application.config.globus_copy_file_group.freeze
  @@globus_copy_file_permissions = DeepBlueDocs::Application.config.globus_copy_file_permissions.freeze

  # True when the work's files have been fully copied to the download dir.
  def self.files_available?( concern_id )
    copy_complete? concern_id
  end

  # A copy is complete when the work's directory exists under download.
  def self.copy_complete?( id )
    dir = @@globus_download_dir
    dir = dir.join files_target_file_name( id )
    Dir.exist? dir
  end

  # Delete every file directly inside +dir_path+; optionally the dir too.
  def self.clean_dir( dir_path, delete_dir: false )
    return unless Dir.exist? dir_path
    Dir.foreach( dir_path ) do |f|
      next if [ '.', '..' ].include? f
      clean_file File.join( dir_path, f )
    end
    return unless delete_dir
    Dir.delete dir_path
  end

  # Delete a single file if it exists.
  def self.clean_file( file_path )
    File.delete file_path if File.exist? file_path
  end

  # Path of the per-work error marker file in the prep dir.
  def self.error_file( id )
    target_file_name_env( @@globus_prep_dir, 'error', target_base_name( id ) )
  end

  # Lines of the error file, or nil when no error file exists.
  def self.error_file_contents( id )
    contents = nil
    return contents unless error_file_exists? id
    file = error_file id
    open( file, 'r' ) { |f| contents = f.readlines }
    return contents
  end

  # True when the per-work error file exists; optionally logs its content.
  def self.error_file_exists?( id, write_error_to_log: false, log_prefix: '', quiet: true )
    error_file = error_file( id )
    error_file_exists = false
    if File.exist? error_file
      if write_error_to_log
        msg = nil
        open( error_file, 'r' ) { |f| msg = f.read; msg.chomp! } # rubocop:disable Style/Semicolon
        Deepblue::LoggingHelper.debug "#{log_prefix} error file contains: #{msg}" unless quiet
      end
      error_file_exists = true
    end
    error_file_exists
  end

  # Public Globus URL for the work's download directory.
  def self.external_url( id )
    "#{@@globus_base_url}#{files_target_file_name(id)}%2F"
  end

  # Base name (no env prefix) of the work's Globus directory.
  def self.files_target_file_name( id = '' )
    "#{@@globus_base_file_name}#{id}"
  end

  # True when a copy is in flight: locked but not yet complete.
  def self.files_prepping?( id )
    rv = !copy_complete?( id ) && locked?( id )
    rv
  end

  # Write the current era token into the work's lock file.
  # @return [Boolean] whether the lock file now exists
  def self.lock( concern_id, log_prefix )
    lock_token = era_token
    lock_file = lock_file concern_id
    # NOTE(review): @globus_job_quiet here is a class-level ivar that is
    # never assigned (instances set their own copy), so it is nil and this
    # debug line always fires — presumably a quiet: parameter was intended.
    Deepblue::LoggingHelper.debug "#{log_prefix} writing lock token #{lock_token} to #{lock_file}" unless @globus_job_quiet
    open( lock_file, 'w' ) { |f| f << lock_token << "\n" }
    File.exist? lock_file
  end

  # Path of the per-work lock file in the prep dir.
  def self.lock_file( id = '' )
    target_file_name_env( @@globus_prep_dir, 'lock', target_base_name( id ) )
  end

  # True when a lock file exists AND holds the current era token
  # (stale locks from earlier eras do not count). An existing error file
  # short-circuits to false.
  def self.locked?( concern_id, log_prefix: '', quiet: true )
    return false if error_file_exists?( concern_id, write_error_to_log: true, log_prefix: log_prefix, quiet: quiet )
    lock_file = lock_file concern_id
    return false unless File.exist? lock_file
    current_token = era_token
    lock_token = read_token lock_file
    rv = ( current_token == lock_token )
    # NOTE(review): @quiet is an unassigned (nil) class-level ivar, so this
    # always logs; the quiet: parameter was almost certainly intended here.
    Deepblue::LoggingHelper.debug "#{log_prefix} testing token from #{lock_file}: current_token: #{current_token} == lock_token: #{lock_token}: #{rv}" unless @quiet
    rv
  end

  # Read a one-line token from a file.
  # NOTE(review): String#chomp! returns nil when there is no trailing
  # newline; tokens are written with "\n" by self.lock, so this works in
  # practice, but a file without a newline would yield nil.
  def self.read_token( token_file )
    token = nil
    open( token_file, 'r' ) { |f| token = f.read.chomp! }
    return token
  end

  # Environment prefix used in marker file names (e.g. "production_").
  def self.server_prefix( str: '' )
    "#{Rails.env}#{str}"
  end

  # Base name for per-work artifacts; prefix: nil means "use env prefix".
  def self.target_base_name( id = '', prefix: '', postfix: '' )
    prefix = server_prefix( str: '_' ) if prefix.nil?
    "#{prefix}#{@@globus_base_file_name}#{id}#{postfix}"
  end

  # Hidden, env-scoped marker file name: ".<env>.<type>.<base>".
  def self.target_file_name_env( dir, file_type, base_name )
    target_file_name( dir, ".#{server_prefix}.#{file_type}.#{base_name}" )
  end

  # Join +filename+ (+ext) onto +dir+; a nil dir yields a bare Pathname.
  def self.target_file_name( dir, filename, ext = '' )
    return Pathname.new( filename + ext ) if dir.nil?
    dir.join( filename + ext )
  end

  # Final download directory for the work.
  def self.target_download_dir( concern_id )
    target_dir_name( @@globus_download_dir, target_base_name(concern_id ) )
  end

  # Join +subdir+ onto +dir+, creating it when mkdir: is true.
  def self.target_dir_name( dir, subdir, mkdir: false )
    target_dir = dir.join subdir
    if mkdir
      Dir.mkdir(target_dir ) unless Dir.exist? target_dir
    end
    target_dir
  end

  # Staging (prep) directory for the work.
  def self.target_prep_dir( concern_id, prefix: '', postfix: '', mkdir: false )
    prefix = server_prefix( str: '_' ) if prefix.nil?
    subdir = target_base_name( concern_id, prefix: prefix, postfix: postfix )
    target_dir_name( @@globus_prep_dir, subdir, mkdir: mkdir )
  end

  # Temporary staging directory ("<prep dir>_tmp") for the work.
  def self.target_prep_tmp_dir( concern_id, prefix: '', postfix: '', mkdir: false )
    prefix = server_prefix( str: '_' ) if prefix.nil?
    dir = target_prep_dir( concern_id, prefix: prefix, postfix: "#{postfix}_tmp" )
    if mkdir
      Dir.mkdir(dir ) unless Dir.exist? dir
    end
    dir
  end

  # The current era token; locks from other eras are considered stale.
  def self.era_token
    @@globus_era_token
  end

  # The era token parsed as a Time (the token is a timestamp string).
  def self.era_token_time
    timestamp = era_token
    Time.parse( timestamp )
  end

  # Minimal perform: record ids/paths. Subclasses (e.g. GlobusCopyJob)
  # implement the real work via #globus_job_perform.
  # @param concern_id [String] the work id
  # @param log_prefix [String] prefix for log lines
  def perform( concern_id, log_prefix: "Globus: " )
    @globus_concern_id = concern_id
    @globus_log_prefix = log_prefix
    @globus_lock_file = GlobusJob.lock_file concern_id
  end

  protected

  # Take the lock unless someone (in the current era) already holds it.
  def globus_acquire_lock?
    return false if globus_locked?
    globus_lock
  end

  def globus_copy_job_complete?( concern_id )
    Dir.exist? target_download_dir2 concern_id
  end

  # Remove the accumulated notification-email file, if present.
  def globus_copy_job_email_clean
    email_file = globus_copy_job_email_file
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_email_reset exists? #{email_file}" unless @globus_job_quiet
    return unless File.exist? email_file
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus_copy_job_email_reset delete #{email_file}" unless @globus_job_quiet
    File.delete email_file
  end

  # Path of the file accumulating notification email addresses.
  def globus_copy_job_email_file
    rv = GlobusJob.target_file_name_env( @@globus_prep_dir,
                                         'copy_job_emails',
                                         GlobusJob.target_base_name( @globus_concern_id ) )
    return rv
  end

  # Ask the work to send its "RDS" notification email, when it supports it.
  def globus_email_rds( curation_concern: nil, description: '' )
    curation_concern = ActiveFedora::Base.find @globus_concern_id if curation_concern.nil?
    return unless curation_concern.respond_to? :email_rds_globus
    curation_concern.email_rds_globus( current_user: nil, event_note: description )
  end

  # Persist +msg+ into the per-work error file and return its path.
  def globus_error( msg )
    file = globus_error_file
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} writing error message to #{file}" unless @globus_job_quiet
    open( file, 'w' ) { |f| f << msg << "\n" }
    file
  end

  def globus_error_file
    GlobusJob.target_file_name_env( @@globus_prep_dir, 'error', GlobusJob.target_base_name( @globus_concern_id ) )
  end

  def globus_error_file_exists?( write_error_to_log: false )
    GlobusJob.error_file_exists?( @globus_concern_id,
                                  write_error_to_log: write_error_to_log,
                                  log_prefix: @globus_log_prefix,
                                  quiet: @globus_job_quiet )
  end

  # Delete the per-work error file; always returns true.
  def globus_error_reset
    file = globus_error_file
    File.delete file if File.exist? file
    true
  end

  # Yield +file+ under an advisory flock (when the file exists), always
  # releasing the lock afterwards; yields without locking otherwise.
  def globus_file_lock( file, mode: File::LOCK_EX )
    success = true
    if File.exist? file
      success = file.flock( mode )
      if success
        begin
          yield file
        ensure
          file.flock( File::LOCK_UN )
        end
      end
    else
      yield file
    end
    return success
  end

  # Template method driving a Globus job:
  #   skip if already complete -> acquire lock (skip if held) ->
  #   reset error/complete markers -> yield the real work ->
  #   write the complete marker; any failure is recorded in the error
  #   file, and the lock is always released.
  def globus_job_perform( concern_id: '', email: nil, log_prefix: 'Globus: ', quiet: false ) # , &globus_block )
    @globus_concern_id = concern_id
    @globus_log_prefix = log_prefix
    @globus_lock_file = nil
    @globus_job_quiet = quiet
    return unless @@globus_enabled
    begin
      if globus_job_complete?
        globus_job_perform_already_complete( email: email )
        return
      end
      @globus_lock_file = GlobusJob.lock_file @globus_concern_id
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} lock file #{@globus_lock_file}" unless @globus_job_quiet
    rescue Exception => e # rubocop:disable Lint/RescueException
      # Record the failure on disk so later runs see the error state.
      msg = "#{@globus_log_prefix} #{e.class}: #{e.message} at #{e.backtrace[0]}"
      # Rails.logger.error msg
      Rails.logger.error "#{@globus_log_prefix} #{e.class}: #{e.message} at #{e.backtrace.join("\n")}"
      globus_error msg
      return
    end
    unless globus_acquire_lock?
      globus_job_perform_in_progress( email: email )
      return
    end
    begin
      globus_error_reset
      globus_job_perform_complete_reset
      # globus_block.call
      yield if block_given?
      @globus_lock_file = globus_unlock
      globus_job_perform_complete
    rescue Exception => e # rubocop:disable Lint/RescueException
      msg = "#{@globus_log_prefix} #{e.class}: #{e.message} at #{e.backtrace[0]}"
      Rails.logger.error msg
      globus_error msg
    ensure
      globus_unlock
    end
  end

  # Hook: called instead of the block when the job already finished.
  def globus_job_perform_already_complete( email: nil )
    if email.nil?
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} skipping already complete globus job" unless @globus_job_quiet
    else
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} skipping already complete globus job, email=#{email}" unless @globus_job_quiet
    end
  end

  # Hook: called instead of the block when another run holds the lock.
  def globus_job_perform_in_progress( email: nil )
    if email.nil?
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} skipping in progress globus job" unless @globus_job_quiet
    else
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} skipping in progress globus job, email=#{email}" unless @globus_job_quiet
    end
  end

  # Write the completion marker (containing a timestamp) and clear errors.
  def globus_job_perform_complete
    file = globus_job_complete_file
    timestamp = Time.now.to_s
    open( file, 'w' ) { |f| f << timestamp << "\n" }
    globus_error_reset
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} job complete at #{timestamp}" unless @globus_job_quiet
    return file
  end

  # Delete the completion marker; always returns true.
  def globus_job_perform_complete_reset
    file = globus_job_complete_file
    File.delete file if File.exist? file
    true
  end

  def globus_lock
    GlobusJob.lock( @globus_concern_id, @globus_log_prefix )
  end

  def globus_lock_file( id = '' )
    GlobusJob.lock_file id
  end

  def globus_locked?
    GlobusJob.locked?( @globus_concern_id, log_prefix: @globus_log_prefix, quiet: @globus_job_quiet )
  end

  def globus_ready_file
    GlobusJob.target_file_name_env( @@globus_prep_dir, 'ready', GlobusJob.target_base_name( @globus_concern_id ) )
  end

  # Release the lock by deleting its file; always returns nil (callers
  # use the return value to clear @globus_lock_file).
  def globus_unlock
    return nil if @globus_lock_file.nil?
    return nil unless File.exist? @globus_lock_file
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} unlock by deleting file #{@globus_lock_file}" unless @globus_job_quiet
    File.delete @globus_lock_file
    nil
  end

  # Instance-side conveniences over the class-level path helpers.
  def target_download_dir2( concern_id )
    GlobusJob.target_download_dir( concern_id )
  end

  def target_dir_name2( dir, subdir, mkdir: false )
    GlobusJob.target_dir_name( dir, subdir, mkdir: mkdir )
  end

  def target_prep_dir2( concern_id, prefix: '', postfix: '', mkdir: false )
    GlobusJob.target_prep_dir( concern_id, prefix: prefix, postfix: postfix, mkdir: mkdir )
  end

  def target_prep_tmp_dir2( concern_id, prefix: '', postfix: '', mkdir: false )
    GlobusJob.target_prep_tmp_dir( concern_id, prefix: prefix, postfix: postfix, mkdir: mkdir )
  end

end
-
# frozen_string_literal: true
-
-
1
# Scans the globus prep directory for leftover lock files, error files, and
# prep (incl. tmp) directories, and re-enqueues a GlobusCopyJob for every
# concern id found — i.e. restarts all interrupted globus copies.
class GlobusRestartAllJob < GlobusJob

  queue_as :globus_restart_all

  # @param [String, "Globus: "] log_prefix
  # @param [Boolean] quiet suppress debug logging when true
  def perform( log_prefix: "Globus: ", quiet: false )
    Deepblue::LoggingHelper.debug "#{log_prefix}globus_restart_all_job starting..." unless quiet
    globus_job_perform( concern_id: "Restart_All", log_prefix: "#{log_prefix}globus_restart_all_job", quiet: quiet ) do
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} begin restart all" unless @globus_job_quiet
      # hash used as a set: concern id => true, de-duplicates ids found by
      # more than one of the four filename patterns below
      concern_ids_to_restart = {}
      base_name = GlobusJob.target_base_name ''
      # pattern for lock files, capturing the concern id suffix
      prefix = GlobusJob.target_file_name_env(nil, 'lock', base_name ).to_s
      lock_file_re = Regexp.compile( '^' + prefix + '([0-9a-z-]+)' + '$' )
      # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} lock_file_re=#{lock_file_re}" unless @globus_job_quiet
      # pattern for error files
      prefix = GlobusJob.target_file_name_env(nil, 'error', base_name ).to_s
      error_file_re = Regexp.compile( '^' + prefix + '([0-9a-z-]+)' + '$' )
      # patterns for prep directories and their "_tmp" variants
      prefix = GlobusJob.target_file_name( nil, "#{GlobusJob.server_prefix(str: '_')}#{base_name}" ).to_s
      prep_dir_re = Regexp.compile( '^' + prefix + '([0-9a-z-]+)' + '$' )
      # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} prep_dir_re=#{prep_dir_re}" unless @globus_job_quiet
      prep_tmp_dir_re = Regexp.compile( '^' + prefix + '([0-9a-z-]+)_tmp' + '$' )
      starts_with_path = "#{@@globus_prep_dir}#{File::SEPARATOR}"
      files = Dir.glob( "#{starts_with_path}*" )
      # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} files.size=#{files.size}" unless @globus_job_quiet
      files.each do |f|
        # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} processing #{f}"
        # strip the prep dir prefix so the regexes match on the bare name
        f = f.slice( (starts_with_path.length)..(f.length) ) if f.starts_with? starts_with_path
        # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} processing #{f}" unless @globus_job_quiet
        match = lock_file_re.match( f )
        if match
          # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} lock_file_re=#{lock_file_re} matched #{f}" unless @globus_job_quiet
          concern_id = match[1]
          concern_ids_to_restart.store( concern_id, true )
          next
        end
        match = error_file_re.match( f )
        if match
          # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} lock_file_re=#{error_file_re} matched #{f}" unless @globus_job_quiet
          concern_id = match[1]
          concern_ids_to_restart.store( concern_id, true )
          next
        end
        match = prep_dir_re.match( f )
        if match
          # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} prep_dir_re=#{prep_dir_re} matched #{f}" unless @globus_job_quiet
          concern_id = match[1]
          concern_ids_to_restart.store( concern_id, true )
          next
        end
        match = prep_tmp_dir_re.match( f )
        next unless match
        # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} lock_file_re=#{prep_tmp_dir_re} matched #{f}" unless @globus_job_quiet
        concern_id = match[1]
        concern_ids_to_restart.store( concern_id, true )
      end
      # re-enqueue one copy job per distinct concern id found above
      concern_ids_to_restart.keys.each do |concern_id|
        # Deepblue::LoggingHelper.debug "#{@globus_log_prefix} restart copy job #{concern_id}" unless @globus_job_quiet
        ::GlobusCopyJob.perform_later( concern_id )
      end
      Deepblue::LoggingHelper.debug "#{@globus_log_prefix} restart all complete" unless @globus_job_quiet
    end
  end

  protected

  # Path of the "restarted" marker file for this job's concern.
  def globus_job_complete_file
    GlobusJob.target_file_name_env( @@globus_prep_dir, 'restarted', GlobusJob.target_base_name( @globus_concern_id ) )
  end

  # A restart is considered complete when the marker file exists and is at
  # least as new as the current era token time.
  def globus_job_complete?
    file = globus_job_complete_file
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} globus job complete file #{file}" unless @globus_job_quiet
    return false unless File.exist? file
    # NOTE: the local deliberately shadows the method below; the RHS still
    # parses as a method call because it takes an argument.
    last_complete_time = last_complete_time file
    token_time = ::GlobusJob.era_token_time
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} token_time:#{token_time} <= last_complete_time:#{last_complete_time}" unless @globus_job_quiet
    Deepblue::LoggingHelper.debug "#{@globus_log_prefix} token_time.class:#{token_time.class} <= last_complete_time.class:#{last_complete_time.class}" unless @globus_job_quiet
    token_time <= last_complete_time
  end

  # Creation time of the marker file.
  # NOTE(review): File.birthtime raises NotImplementedError on filesystems
  # that do not record creation time — confirm deployment targets support it.
  def last_complete_time( file )
    File.birthtime file
  end

end
-
# frozen_string_literal: true
-
-
1
module Hyrax
  # A common base class for all Hyrax jobs.
  # This allows downstream applications to manipulate all the hyrax jobs by
  # including modules on this class.
  class ApplicationJob < ::ActiveJob::Base
  end
end
-
# frozen_string_literal: true
-
-
1
# Base mailer for the application; subclasses override the default sender
# and layout as needed.
class ApplicationMailer < ActionMailer::Base
  default from: 'from@example.com'
  layout 'mailer'
end
-
# frozen_string_literal: true
-
-
1
# Mailer for Deep Blue notification email; defaults the sender to the
# configured notification address.
class DeepblueMailer < ApplicationMailer

  default from: Deepblue::EmailHelper.notification_email

  layout "mailer.html"

  # Sends a single email with the given recipient, sender, subject and body.
  def send_an_email( to:, from:, subject:, body: )
    mail( to: to, from: from, subject: subject, body: body )
  end

end
-
# frozen_string_literal: true
-
-
1
# CanCan ability definitions, extending the stock Hydra/Hyrax abilities with
# Deep Blue specific action aliases and deposit restrictions.
class Ability
  include Hydra::Ability
  include Hyrax::Ability

  # self.ability_logic += [:everyone_can_create_curation_concerns]
  self.ability_logic += [:deepblue_abilities]

  # Maps Deep Blue custom controller actions onto the standard CRUD
  # permissions so existing can?/cannot? rules cover them.
  def deepblue_abilities
    alias_action :display_provenance_log, to: :read
    alias_action :globus_clean_download, to: :delete
    alias_action :globus_download, to: :read
    alias_action :globus_add_email, to: :read
    alias_action :globus_download_add_email, to: :read
    alias_action :globus_download_notify_me, to: :read
    alias_action :tombstone, to: :delete
    alias_action :zip_download, to: :read

    # alias_action :confirm, to: :read
    # alias_action :identifiers, to: :update
  end

  # Define any customized permissions here.
  def custom_permissions
    # Limits deleting objects to a the admin user
    #
    # if current_user.admin?
    #   can [:destroy], ActiveFedora::Base
    # end

    # Limits creating new objects to a specific group
    #
    # if user_groups.include? 'special_group'
    #   can [:create], ActiveFedora::Base
    # end

    # restrict depositing permissions
    if can_deposit?
      can [:create], DataSet
      can [:doi], DataSet
      can [:create], FileSet
    else
      cannot [:create, :edit, :update, :destroy], DataSet
      cannot [:create, :edit, :update, :destroy], FileSet
    end
    # NOTE(review): branch intentionally empty — kept as a placeholder for
    # admin-only Role management (see commented rule below).
    if admin?
      # can [:create, :show, :add_user, :remove_user, :index, :edit, :update, :destroy], Role # uncomment to expose Role management in UI
    end
  end

  # True when the current user may deposit works (admin or depositor role).
  def can_deposit?
    admin? || depositor?
  end

  # True when the current user is an admin, either via the User#admin? flag
  # or the inherited Hyrax ability logic.
  def admin?
    current_user.admin? || super
  end

  # True when the current user holds the Sipity depositing workflow role
  # for any workflow.
  def depositor?
    depositing_role = Sipity::Role.find_by(name: Hyrax::RoleRegistry::DEPOSITING)
    return false unless depositing_role
    Hyrax::Workflow::PermissionQuery.scope_processing_agents_for(user: current_user).any? do |agent|
      agent.workflow_responsibilities.joins(:workflow_role)
           .where('sipity_workflow_roles.role_id' => depositing_role.id).any?
    end
  end
end
-
1
# Abstract base class for all ActiveRecord models in this application.
class ApplicationRecord < ActiveRecord::Base
  self.abstract_class = true
end
-
# frozen_string_literal: true
-
-
1
class Collection < ActiveFedora::Base
-
1
include ::Hyrax::CollectionBehavior
-
-
# You can replace these metadata if they're not suitable
-
# include Hyrax::BasicMetadata
-
1
include Umrdr::UmrdrWorkBehavior
-
1
include Umrdr::UmrdrWorkMetadata
-
-
# This must be included at the end, because it finalizes the metadata
-
# schema (by adding accepts_nested_attributes)
-
1
include ::Deepblue::DefaultMetadata
-
-
1
include ::Deepblue::MetadataBehavior
-
1
include ::Deepblue::EmailBehavior
-
1
include ::Deepblue::ProvenanceBehavior
-
-
1
before_destroy :provenance_before_destroy_collection
-
-
1
self.indexer = Hyrax::CollectionWithBasicMetadataIndexer
-
-
1
# before_destroy hook; intentionally a no-op because workflow_destroy
# already records the provenance destroy event.
def provenance_before_destroy_collection
  # workflow_destroy does this
  # provenance_destroy( current_user: '' ) # , event_note: 'provenance_before_destroy_collection' )
end
-
-
1
# The complete set of metadata attribute keys reported for a collection.
# @return [Array<Symbol>]
def metadata_keys_all
  [ :child_collection_ids,
    :child_collection_count,
    :child_work_ids,
    :child_work_count,
    :collection_type,
    :creator,
    :curation_notes_admin,
    :curation_notes_user,
    :date_created,
    :date_modified,
    :date_updated,
    :description,
    :keyword,
    :language,
    :prior_identifier,
    :referenced_by,
    :subject_discipline,
    :title,
    :total_file_size,
    :visibility ]
end
-
-
1
# The minimal set of metadata keys used for brief reporting.
# @return [Array<Symbol>]
def metadata_keys_brief
  [:creator, :title, :visibility]
end
-
-
1
# Metadata keys included in a collection metadata report.
# @return [Array<Symbol>]
def metadata_keys_report
  [ :child_collection_count,
    :child_work_count,
    :collection_type,
    :creator,
    :curation_notes_user,
    :description,
    :keyword,
    :language,
    :referenced_by,
    :subject_discipline,
    :title,
    :total_file_size ]
end
-
-
1
# Metadata keys reported when a collection is updated.
# @return [Array<Symbol>]
def metadata_keys_update
  [:creator, :title, :visibility]
end
-
-
1
# The attributes_* methods below map the email/provenance reporting hooks
# onto the metadata key lists defined above.

# Full attribute set for email reporting.
def attributes_all_for_email
  metadata_keys_all
end

# Full attribute set for provenance logging.
def attributes_all_for_provenance
  metadata_keys_all
end

# Brief attribute set for email reporting.
def attributes_brief_for_email
  metadata_keys_brief
end

# Brief attribute set for provenance logging.
def attributes_brief_for_provenance
  metadata_keys_brief
end

# Standard email attribute set; currently the brief list.
def attributes_standard_for_email
  metadata_keys_brief
end

# Attribute set reported in update emails.
def attributes_update_for_email
  metadata_keys_update
end

# Attribute set recorded in update provenance entries.
def attributes_update_for_provenance
  metadata_keys_update
end
-
-
1
# Route included in event emails; delegates to the common event route.
def for_email_route
  for_event_route
end

# NOTE(review): this builds a DataSet route for a Collection id — it looks
# copied from the DataSet model; confirm whether a collection path helper
# was intended here.
def for_event_route
  Rails.application.routes.url_helpers.hyrax_data_set_path( id: self.id ) # rubocop:disable Style/RedundantSelf
end

# Route recorded in provenance entries; delegates to the common event route.
def for_provenance_route
  for_event_route
end
-
-
1
# Number of child collections, via a Solr membership query.
def child_collection_count
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id} AND generic_type_sim:Collection").count
end

# Ids of child collections, via a Solr membership query.
def child_collection_ids
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id} AND generic_type_sim:Collection").map { |w| w.id } # rubocop:disable Style/SymbolProc
end

# Number of child works, via a Solr membership query.
def child_work_count
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id} AND generic_type_sim:Work").count
end

# Ids of child works, via a Solr membership query.
def child_work_ids
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id} AND generic_type_sim:Work").map { |w| w.id } # rubocop:disable Style/SymbolProc
end
-
-
1
# Total size of all files in the collection.
# NOTE(review): assumes `bytes` is provided by an included behavior module
# (header includes Umrdr::UmrdrWorkBehavior) — confirm.
def total_file_size
  bytes
end

# Total file size formatted for display (e.g. "1.23 MB"), 3 significant
# digits.
def total_file_size_human_readable
  value = total_file_size
  ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value, precision: 3 )
end

# Display type used in report titles.
def title_type
  human_readable_type
end
-
-
1
# Maps collection-specific attributes into email_key_values for event
# emails.
# @return [Boolean] true if the attribute was handled here; false lets the
#   caller apply its default attribute mapping.
def map_email_attributes_override!( event:, # rubocop:disable Lint/UnusedMethodArgument
                                    attribute:,
                                    ignore_blank_key_values:,
                                    email_key_values: )
  value = nil
  handled = case attribute.to_s
            when 'child_collection_count'
              # fix: previously returned child_work_count (copy-paste);
              # this key reports child collections
              value = child_collection_count
              true
            when 'child_collection_ids'
              # fix: previously called collection_ids and omitted the
              # explicit true, relying on the value's truthiness
              value = child_collection_ids
              true
            when 'child_work_count'
              value = child_work_count
              true
            when 'child_work_ids'
              value = child_work_ids
              true
            when 'collection_type'
              value = collection_type.machine_id
              true
            when 'total_file_size'
              value = total_file_size
              true
            when 'total_file_size_human_readable'
              value = total_file_size_human_readable
              true
            when 'visibility'
              value = visibility
              true
            else
              false
            end
  return false unless handled
  if ignore_blank_key_values
    email_key_values[attribute] = value if value.present?
  else
    email_key_values[attribute] = value
  end
  return true
end
-
-
1
# Maps collection-specific attributes into prov_key_values for provenance
# logging.
# @return [Boolean] true if the attribute was handled here; false lets the
#   caller apply its default attribute mapping.
def map_provenance_attributes_override!( event:, # rubocop:disable Lint/UnusedMethodArgument
                                         attribute:,
                                         ignore_blank_key_values:,
                                         prov_key_values: )
  value = nil
  handled = case attribute.to_s
            when 'child_collection_count'
              # fix: previously returned child_work_count (copy-paste);
              # this key reports child collections
              value = child_collection_count
              true
            when 'child_collection_ids'
              # fix: previously called collection_ids and omitted the
              # explicit true, relying on the value's truthiness
              value = child_collection_ids
              true
            when 'child_work_count'
              value = child_work_count
              true
            when 'child_work_ids'
              value = child_work_ids
              true
            when 'collection_type'
              value = collection_type.machine_id
              true
            when 'total_file_size'
              value = total_file_size
              true
            when 'total_file_size_human_readable'
              value = total_file_size_human_readable
              true
            when 'visibility'
              value = visibility
              true
            else
              false
            end
  return false unless handled
  if ignore_blank_key_values
    prov_key_values[attribute] = value if value.present?
  else
    prov_key_values[attribute] = value
  end
  return true
end
-
-
1
# Maps collection-specific attributes into key_values when building a
# metadata hash.
# @return [Boolean] true if the key was handled here; false lets the caller
#   apply its default mapping.
def metadata_hash_override( key:, ignore_blank_values:, key_values: )
  value = nil
  handled = case key.to_s
            when 'child_collection_count'
              # fix: previously returned child_work_count (copy-paste);
              # this key reports child collections
              value = child_collection_count
              true
            when 'child_collection_ids'
              # fix: previously called collection_ids and omitted the
              # explicit true, relying on the value's truthiness
              value = child_collection_ids
              true
            when 'child_work_count'
              value = child_work_count
              true
            when 'child_work_ids'
              value = child_work_ids
              true
            when 'collection_type'
              value = collection_type.machine_id
              true
            when 'total_file_size'
              value = total_file_size
              true
            when 'total_file_size_human_readable'
              value = total_file_size_human_readable
              true
            when 'visibility'
              value = visibility
              true
            else
              false
            end
  return false unless handled
  if ignore_blank_values
    key_values[key] = value if value.present?
  else
    key_values[key] = value
  end
  return true
end
-
-
1
# Objects included in a metadata report: the collection's direct members.
def metadata_report_contained_objects
  member_objects
end

# Returns the blank-value policy flag and the list of keys to include in
# metadata reports.
def metadata_report_keys
  return IGNORE_BLANK_KEY_VALUES, metadata_keys_report
end
-
-
1
# Human-readable report labels for collection-specific metadata keys.
# Returns nil for keys that are not overridden here (caller falls back to
# its default label).
def metadata_report_label_override( metadata_key:, metadata_value: ) # rubocop:disable Lint/UnusedMethodArgument
  labels = {
    'child_collection_count' => 'Child Collection Count: ',
    'child_collection_ids' => 'Child Collection Identifiers: ',
    'child_work_count' => 'Child Work Count: ',
    'child_work_ids' => 'Child Work Identifiers: ',
    'collection_type' => 'Collection Type: ',
    'total_file_size' => 'Total File Size: ',
    'total_file_size_human_readable' => 'Total File Size: '
  }
  labels[metadata_key.to_s]
end
-
-
1
# Prefix prepended to the title line of a collection metadata report.
def metadata_report_title_pre
  "Collection: "
end
-
-
# begin metadata
#
# Each attribute below is a multi-valued RDF property whose underlying
# storage is unordered; the paired *_ordered field persists the user's
# ordering, and these getter/setter overrides apply/record it via
# Deepblue::MetadataHelper.

# the list of creators is ordered
def creator
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: creator_ordered, values: values )
  return values
end

def creator=( values )
  self.creator_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: creator_ordered, values: values )
  super values
end

# the list of curation_notes_admin is ordered
def curation_notes_admin
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: curation_notes_admin_ordered, values: values )
  return values
end

def curation_notes_admin=( values )
  self.curation_notes_admin_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: curation_notes_admin_ordered, values: values )
  super values
end

# the list of curation_notes_user is ordered
def curation_notes_user
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: curation_notes_user_ordered, values: values )
  return values
end

def curation_notes_user=( values )
  self.curation_notes_user_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: curation_notes_user_ordered, values: values )
  super values
end

# the list of description is ordered
def description
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: description_ordered, values: values )
  return values
end

def description=( values )
  self.description_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: description_ordered, values: values )
  super values
end

#
# handle the list of referenced_by as ordered
#
def referenced_by
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: referenced_by_ordered, values: values )
  return values
end

def referenced_by=( values )
  self.referenced_by_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: referenced_by_ordered, values: values )
  super values
end

#
# the list of keyword is ordered
#
def keyword
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: keyword_ordered, values: values )
  return values
end

def keyword=( values )
  self.keyword_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: keyword_ordered, values: values )
  super values
end

#
# handle the list of language as ordered
#
def language
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: language_ordered, values: values )
  return values
end

def language=( values )
  self.language_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: language_ordered, values: values )
  super values
end

# the list of title is ordered
def title
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: title_ordered, values: values )
  return values
end

def title=( values )
  self.title_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: title_ordered, values: values )
  super values
end

# end metadata
-
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  # Raised for errors in abstract event processing.
  class AbstractEventError < RuntimeError
  end

  # Shared event-name constants and per-event attribute caching helpers used
  # by the email/provenance event behaviors. The cache is Rails.cache keyed
  # by "<id>.<event>" (optionally suffixed with a behavior).
  module AbstractEventBehavior

    EVENT_CHARACTERIZE = 'characterize'
    EVENT_CHILD_ADD = 'child_add'
    EVENT_CHILD_REMOVE = 'child_remove'
    EVENT_CREATE = 'create'
    EVENT_CREATE_DERIVATIVE = 'create_derivative'
    EVENT_DESTROY = 'destroy'
    EVENT_DOWNLOAD = 'download'
    EVENT_EMBARGO = 'embargo'
    EVENT_FIXITY_CHECK = 'fixity_check'
    EVENT_GLOBUS = 'globus'
    EVENT_INGEST = 'ingest'
    EVENT_MIGRATE = 'migrate'
    EVENT_MINT_DOI = 'mint_doi'
    EVENT_PUBLISH = 'publish'
    EVENT_TOMBSTONE = 'tombstone'
    EVENT_UNEMBARGO = 'unembargo'
    EVENT_UNPUBLISH = 'unpublish'
    EVENT_UPDATE = 'update'
    EVENT_UPDATE_AFTER = 'update_after'
    EVENT_UPDATE_BEFORE = 'update_before'
    EVENT_UPDATE_VERSION = 'update_version'
    EVENT_UPLOAD = 'upload'
    EVENT_VIRUS_SCAN = 'virus_scan'
    EVENT_WORKFLOW = 'workflow'
    # All recognized event names.
    EVENTS =
      [
        EVENT_CHARACTERIZE,
        EVENT_CHILD_ADD,
        EVENT_CHILD_REMOVE,
        EVENT_CREATE,
        EVENT_CREATE_DERIVATIVE,
        EVENT_DESTROY,
        EVENT_DOWNLOAD,
        EVENT_EMBARGO,
        EVENT_FIXITY_CHECK,
        EVENT_GLOBUS,
        EVENT_INGEST,
        EVENT_MIGRATE,
        EVENT_MINT_DOI,
        EVENT_PUBLISH,
        EVENT_TOMBSTONE,
        EVENT_UNEMBARGO,
        EVENT_UNPUBLISH,
        EVENT_UPDATE,
        EVENT_UPDATE_AFTER,
        EVENT_UPDATE_BEFORE,
        EVENT_UPDATE_VERSION,
        EVENT_UPLOAD,
        EVENT_VIRUS_SCAN,
        EVENT_WORKFLOW
      ].freeze

    # Named flags for the ignore_blank_key_values parameters used by event
    # attribute mapping.
    IGNORE_BLANK_KEY_VALUES = true
    USE_BLANK_KEY_VALUES = false

    # True when a cache entry exists for the given event/id (and optional
    # behavior).
    def event_attributes_cache_exist?( event:, id:, behavior: nil )
      key = event_attributes_cache_key( event: event, id: id, behavior: behavior )
      rv = Rails.cache.exist?( key )
      rv
    end

    # Fetches the cached attributes for the given event/id (and optional
    # behavior); nil when absent.
    def event_attributes_cache_fetch( event:, id:, behavior: nil )
      key = event_attributes_cache_key( event: event, id: id, behavior: behavior )
      rv = Rails.cache.fetch( key )
      rv
    end

    # Builds the cache key: "<id>.<event>" or "<id>.<event>.<behavior>".
    def event_attributes_cache_key( event:, id:, behavior: nil )
      return "#{id}.#{event}" if behavior.blank?
      "#{id}.#{event}.#{behavior}"
    end

    # Writes attributes (default: the current timestamp) into the cache for
    # the given event/id (and optional behavior).
    def event_attributes_cache_write( event:, id:, attributes: DateTime.now, behavior: nil )
      key = event_attributes_cache_key( event: event, id: id, behavior: behavior )
      Rails.cache.write( key, attributes )
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
module DefaultMetadata
-
1
extend ActiveSupport::Concern
-
-
# Usage notes and expectations can be found in the Metadata Application Profile:
-
# https://docs.google.com/spreadsheets/d/1koKjV7bjn7v4r5a3gsowEimljHiAwbwuOgjHe7FEtuw/edit?usp=sharing
-
-
1
included do # rubocop:disable Metrics/BlockLength
-
-
4
after_initialize :set_default_visibility
-
-
4
def set_default_visibility
-
57
self.visibility = Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC if new_record?
-
end
-
-
4
property :additional_information, predicate: ::RDF::Vocab::DC.description do |index|
-
8
index.as :stored_searchable
-
end
-
-
# multiple: false, until "conference" is converted to a nested attribute so that the location, name, and section are all related/stored together
-
4
property :conference_location, predicate: ::RDF::URI.new("http://d-nb.info/standards/elementset/gnd#placeOfConferenceOrEvent"), multiple: false do |index|
-
8
index.as :stored_searchable
-
end
-
-
# multiple: false, until "conference" is converted to a nested attribute so that the location, name, and section are all related/stored together
-
4
property :conference_name, predicate: ::RDF::Vocab::BIBO.presentedAt, multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# multiple: false, until "conference" is converted to a nested attribute so that the location, name, and section are all related/stored together
-
4
property :conference_section, predicate: ::RDF::URI.new("https://w2id.org/scholarlydata/ontology/conference-ontology.owl#Track"), multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# accessor attribute used only to group the date fields and allow proper ordering in the forms
-
4
attr_accessor :dates_section
-
-
4
property :date_accepted, predicate: ::RDF::Vocab::DC.dateAccepted, multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :date_collected, predicate: ::RDF::Vocab::DWC.measurementDeterminedDate do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :date_reviewed, predicate: ::RDF::URI.new("http://schema.org/lastReviewed") do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :date_valid, predicate: ::RDF::Vocab::DC.valid, multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :degree_field, predicate: ::RDF::URI.new("http://vivoweb.org/ontology/core#majorField") do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# accessor value used by AddOtherFieldOptionActor to persist "Other" values provided by the user
-
4
attr_accessor :degree_field_other
-
-
4
property :degree_level, predicate: ::RDF::URI.new("http://purl.org/NET/UNTL/vocabularies/degree-information/#level"), multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# accessor value used by AddOtherFieldOptionActor to persist "Other" values provided by the user
-
4
attr_accessor :degree_level_other
-
-
# 67 description.thesisdegreename
-
4
property :degree_name, predicate: ::RDF::URI.new("http://purl.org/ontology/bibo/ThesisDegree") do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# accessor value used by AddOtherFieldOptionActor to persist "Other" values provided by the user
-
4
attr_accessor :degree_name_other
-
-
4
property :digitization_spec, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/conversionSpecifications") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# property :doi, predicate: ::RDF::Vocab::Identifiers.doi, multiple: false do |index|
-
# index.as :stored_searchable, :facetable
-
# end
-
-
4
property :dspace_collection, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/dspaceCollection") do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :dspace_community, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/dspaceCommunity") do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :file_extent, predicate: ::RDF::Vocab::DC.extent do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :funding_body, predicate: ::RDF::Vocab::MARCRelators.fnd do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :funding_statement, predicate: ::RDF::URI.new("http://datacite.org/schema/kernel-4/fundingReference") do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :hydrologic_unit_code, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/hydrologicUnitCode") do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :import_url, predicate: ::RDF::URI.new('http://scholarsphere.psu.edu/ns#importUrl'), multiple: false do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :in_series, predicate: ::RDF::URI.new("http://lsdis.cs.uga.edu/projects/semdis/opus#in_series") do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :keyword, predicate: ::RDF::Vocab::DC11.subject do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :label, predicate: ActiveFedora::RDF::Fcrepo::Model.downloadFilename, multiple: false do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :license, predicate: ::RDF::Vocab::DC.rights do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# property :nested_geo, :predicate => ::RDF::URI("https://purl.org/geojson/vocab#Feature"), :class_name => NestedGeo
-
-
# property :nested_related_items, predicate: ::RDF::Vocab::DC.relation, :class_name => NestedRelatedItems do |index|
-
# index.as :stored_searchable
-
# end
-
-
# accessor value used by AddOtherFieldOptionActor to persist "Other" values provided by the user
-
4
attr_accessor :other_affiliation_other
-
-
4
property :prior_identifier, predicate: ActiveFedora::RDF::Fcrepo::Model.altIds, multiple: true do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :related_url, predicate: ::RDF::RDFS.seeAlso do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :relative_path, predicate: ::RDF::URI.new('http://scholarsphere.psu.edu/ns#relativePath'), multiple: false do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :replaces, predicate: ::RDF::Vocab::DC.replaces, multiple: false do |index|
-
8
index.as :stored_searchable
-
end
-
-
4
property :resource_type, predicate: ::RDF::Vocab::DC.type do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :rights_statement, predicate: ::RDF::Vocab::EDM.rights do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
4
property :source, predicate: ::RDF::Vocab::DC.source do |index|
-
8
index.as :stored_searchable
-
end
-
-
# START These are ALL the metadata from Dspace
-
-
# 1 contributor author
-
4
property :contributor_author, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorAuthor") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 2 contributor advisor
-
4
property :contributor_advisor, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorAdvisor") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 3 contributor - part of basic metadata
-
4
property :contributor, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorMain") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 4 contributor editor
-
4
property :contributor_editor, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorEditor") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 5 contributor illustrator
-
4
property :contributor_illustrator, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorIllustrator") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 6 contributor
-
4
property :contributor, predicate: ::RDF::Vocab::DC11.contributor do |index|
-
4
index.as :stored_searchable
-
end
-
-
# 7 coverage spatial - part of basic metadata
-
4
property :based_near, predicate: ::RDF::Vocab::DC.spatial, class_name: Hyrax::ControlledVocabularies::Location do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# 8 coverage temporal
-
4
property :coverage_temporal, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/coverageTemporal") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 9 creator null - part of basic
-
4
property :creator, predicate: ::RDF::Vocab::DC11.creator do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# 10 date
-
4
property :date, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/date") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 11 date accessioned
-
4
property :date_accessioned, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/dateAccessioned") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 12 date available
-
4
property :date_available, predicate: ::RDF::Vocab::DC.available, multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# 13 date copyright
-
4
property :date_copyright, predicate: ::RDF::Vocab::DC.dateCopyrighted, multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# 14 date created - part of basic metadata
-
4
property :date_created, predicate: ::RDF::Vocab::DC.created, multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# 15 date issued
-
4
property :date_issued, predicate: ::RDF::Vocab::DC.issued, multiple: false do |index|
-
8
index.as :stored_searchable, :facetable
-
end
-
-
# 16 date submitted
-
4
property :date_submitted, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/dateSubmitted") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 17 identifier - part of basic metadata
-
4
property :identifier, predicate: ::RDF::Vocab::DC.identifier do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 18 identifier citation - part of basic metadata
-
4
property :bibliographic_citation, predicate: ::RDF::Vocab::DC.bibliographicCitation
-
-
# 19 identifier govdoc
-
4
property :identifier_govdoc, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierGovdoc") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 20 identifier isbn
-
4
property :isbn, predicate: ::RDF::Vocab::Identifiers.isbn do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 21 identifier issn
-
4
property :issn, predicate: ::RDF::Vocab::Identifiers.issn do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 22 identifier sici
-
4
property :identifier_sici, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierSici") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 23 identifier ismn
-
4
property :identifier_ismn, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierIsmn") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 24 identifier other
-
4
property :identifier_other, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierOther") do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 25 identifier uri
-
4
property :identifier_uri, predicate: ::RDF::Vocab::Identifiers.uri do |index|
-
8
index.as :stored_searchable
-
end
-
-
# 26 description
# NOTE(review): property name is misspelled ("desciption" vs "description").
# Renaming would break callers and persisted data, so it is only flagged here.
property :desciption_none, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionNone") do |index|
  index.as :stored_searchable
end

# 26 description null - part of basic metadata
property :description, predicate: ::RDF::Vocab::DC11.description do |index|
  index.as :stored_searchable
end

# 27 description abstract
property :description_abstract, predicate: ::RDF::Vocab::DC.abstract do |index|
  index.as :stored_searchable
end

# 28 description provenance
property :description_provenance, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionProvenance") do |index|
  index.as :stored_searchable
end

# 29 description sponsorship
property :description_sponsorship, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionSponsorship") do |index|
  index.as :stored_searchable
end

# 30 description statementofresponsibility
property :description_statementofresponsibility, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionStatementofresponsibility") do |index|
  index.as :stored_searchable
end

# 31 description tableofcontents
property :tableofcontents, predicate: ::RDF::Vocab::DC.tableOfContents do |index|
  index.as :stored_searchable
end

# 32 description uri
property :description_uri, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionUri") do |index|
  index.as :stored_searchable
end

# 33 format
# NOTE(review): ::RDF::Vocab::DC.FileFormat is the dcterms *class* term
# FileFormat, not a property; dcterms:format may have been intended —
# confirm before changing, since the predicate is persisted.
property :file_format, predicate: ::RDF::Vocab::DC.FileFormat do |index|
  index.as :stored_searchable, :facetable
end

# 34 format extent
property :format_extent, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/formatExtent") do |index|
  index.as :stored_searchable
end

# 35 format medium
property :format_medium, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/formatMedium") do |index|
  index.as :stored_searchable
end

# 36 format mimetype
property :format_mimetype, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/formatMimetype") do |index|
  index.as :stored_searchable
end

# 37 language - not needed
property :language_none, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/languageNone") do |index|
  index.as :stored_searchable
end

# 38 language iso - part of basic metadata
property :language, predicate: ::RDF::Vocab::DC11.language do |index|
  index.as :stored_searchable, :facetable
end

# 39 publisher - part of basic metadata
property :publisher, predicate: ::RDF::Vocab::DC11.publisher do |index|
  index.as :stored_searchable
end

# 40 relation
property :relation_none, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationNone") do |index|
  index.as :stored_searchable
end

# 41 relation isformatof
property :relation_isformatof, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationIsformatof") do |index|
  index.as :stored_searchable
end

# 42 relation ispartof
property :part_of, predicate: ::RDF::Vocab::DC.isPartOf do |index|
  index.as :stored_searchable
end

# 43 relation ispartofseries
property :relation_ispartofseries, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationIspartofseries") do |index|
  index.as :stored_searchable
end

# 44 relation haspart
property :relation_haspart, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationHaspart") do |index|
  index.as :stored_searchable
end

# 45 relation isversionof
property :relation_isversionof, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationIsversionof") do |index|
  index.as :stored_searchable
end

# 46 relation hasversion
property :relation_hasversion, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationHasversion") do |index|
  index.as :stored_searchable
end

# 47 relation isbasedon
# NOTE(review): property name "isbaseson" is a typo for "isbasedon";
# renaming would break existing callers, so flagged only.
property :relation_isbaseson, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationIsbasedon") do |index|
  index.as :stored_searchable
end

# 48 relation isreferencedby
# NOTE(review): both the property name ("isreferenceby") and the predicate
# URI ("Isreferebcedby") are misspelled. The URI typo is persisted in the
# repository triples, so a correction would require a data migration.
property :relation_isreferenceby, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationIsreferebcedby") do |index|
  index.as :stored_searchable
end

# 49 relation requires
# NOTE(review): property name is singular ("require") while the comment and
# predicate URI say "requires".
property :relation_require, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationRequires") do |index|
  index.as :stored_searchable
end

# 50 relation replaces
property :relation_replaces, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationReplaces") do |index|
  index.as :stored_searchable
end

# 51 relation isreplacedby
# NOTE(review): property name "isrplacedby" is a typo for "isreplacedby".
property :relation_isrplacedby, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationIsreplacedby") do |index|
  index.as :stored_searchable
end

# 52 relation uri
property :relation_uri, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/relationUri") do |index|
  index.as :stored_searchable
end
-
-
# 53 rights
# NOTE(review): "rights_None" breaks the snake_case convention every other
# property here follows ("rights_none"); renaming would break callers.
property :rights_None, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsNone") do |index|
  index.as :stored_searchable
end

# 54 rights uri
property :rights_uri, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsUri") do |index|
  index.as :stored_searchable
end

# 55 source - part of basic metadata
property :source, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/sourceNone") do |index|
  index.as :stored_searchable
end

# 56 source uri
property :source_uri, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/sourceUri") do |index|
  index.as :stored_searchable
end

# 57 subject - part of basic metadata
property :subject, predicate: ::RDF::Vocab::DC.subject do |index|
  index.as :stored_searchable, :facetable
end

# 58 subject classification
# NOTE(review): named "source_classification" (and the URI says
# "sourceClassification") although the DSpace field is "subject
# classification" — confirm which was intended.
property :source_classification, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/sourceClassification") do |index|
  index.as :stored_searchable
end

# 59 subject ddc
property :subject_ddc, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/subjectDdc") do |index|
  index.as :stored_searchable
end

# 60 subject lcc
property :subject_lcc, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/subjectLcc") do |index|
  index.as :stored_searchable
end

# 61 subject lcsh
property :subject_lcsh, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/subjectLcsh") do |index|
  index.as :stored_searchable
end

# 62 subject mesh
property :subject_mesh, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/subjectMesh") do |index|
  index.as :stored_searchable
end

# 63 subject other
property :subject_other, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/subjectOther") do |index|
  index.as :stored_searchable
end

# 64 title - part of basic metadata
# NOTE(review): this redefines :title to an opaquenamespace predicate rather
# than the usual basic-metadata title term — confirm the override is intended.
property :title, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/titleNone") do |index|
  index.as :stored_searchable
end

# 65 title alternative
property :alt_title, predicate: ::RDF::Vocab::DC.alternative do |index|
  index.as :stored_searchable
end

# 66 type
property :type_none, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/typeNone") do |index|
  index.as :stored_searchable
end

property :type_snre, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/typeSnre") do |index|
  index.as :stored_searchable
end

# 67 description thesisdegreename
property :description_thesisdegreename, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionThesisdegreename") do |index|
  index.as :stored_searchable
end

# 68 description thesisdegreediscipline
property :description_thesisdegreediscipline, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionThesisdegreediscipline") do |index|
  index.as :stored_searchable
end

# 69 description thesisdegreegrantor
property :description_thesisdegreegrantor, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionThesisdegreegrantor") do |index|
  index.as :stored_searchable
end

# 70 contributor committeemember
property :contributor_committeemember, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorCommitteemember") do |index|
  index.as :stored_searchable
end

# 71 rights robots
property :rights_robots, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsRobots") do |index|
  index.as :stored_searchable
end

# 72 subject hlbsecondlevel
property :subject_hlbsecondlevel, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/subjectHlbsecondlevel") do |index|
  index.as :stored_searchable
end

# 73 subject hlbtoplevel
property :subject_hlbtoplevel, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/subjectHlbtoplevel") do |index|
  index.as :stored_searchable
end

# 74 description peerreviewed (single-valued)
property :peerreviewed, predicate: ::RDF::URI("http://purl.org/ontology/bibo/peerReviewed"), multiple: false do |index|
  index.as :stored_searchable, :facetable
end

# 75 contributor affiliationum
property :academic_affiliation, predicate: ::RDF::URI("http://vivoweb.org/ontology/core#AcademicDepartment") do |index|
  index.as :stored_searchable, :facetable
end

# 76 contributor affiliationother
property :other_affiliation, predicate: ::RDF::URI("http://vivoweb.org/ontology/core#Department") do |index|
  index.as :stored_searchable, :facetable
end

# 77 contributor affiliationumcampus
property :contributor_affiliationumcampus, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorAffiliationumcampus") do |index|
  index.as :stored_searchable
end
-
-
# 78 identifier uniqname
property :identifier_uniqname, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierUniqname") do |index|
  index.as :stored_searchable
end

# 79 identifier videostream
property :identifier_videostream, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierVideostream") do |index|
  index.as :stored_searchable
end

# 80 identifier pmid
property :identifier_pmid, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierPmid") do |index|
  index.as :stored_searchable
end

# 81 identifier oclc
property :identifier_oclc, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierOclc") do |index|
  index.as :stored_searchable
end

# 82 description withdrawalreason
# NOTE(review): the DSpace field is "description withdrawalreason" but the
# property is named :embargo_reason and mapped to dcterms:accessRights —
# confirm the mapping is intentional.
property :embargo_reason, predicate: ::RDF::Vocab::DC.accessRights, multiple: false do |index|
  index.as :stored_searchable
end

# 83 description bitstreamurl
property :description_bitstreamurl, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionBitstreamurl") do |index|
  index.as :stored_searchable
end

# 84 identifier doi
property :identifier_doi, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierDoi") do |index|
  index.as :stored_searchable
end

# 85 identifier source
property :identifier_source, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierSource") do |index|
  index.as :stored_searchable
end

# 86 identifier citedreference
property :identifier_citedreference, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierCitedreference") do |index|
  index.as :stored_searchable
end

# 87 contributor authoremail
property :contributor_authoremail, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/contributorAuthoremail") do |index|
  index.as :stored_searchable
end

# 88 requestcopy email
property :requestcopy_email, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/requestcopyEmail") do |index|
  index.as :stored_searchable
end

# 89 requestcopy name
property :requestcopy_name, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/requestcopyName") do |index|
  index.as :stored_searchable
end

# 90 identifier imageclass
property :identifier_imageclass, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierImageclass") do |index|
  index.as :stored_searchable
end

# 91 description mapping
property :description_mapping, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionMapping") do |index|
  index.as :stored_searchable
end

# 92 language rfc3066 - intentionally not mapped to a property

# 93 description version
property :description_version, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionVersion") do |index|
  index.as :stored_searchable
end

# 94 rights holder
property :rights_holder, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsHolder") do |index|
  index.as :stored_searchable
end

# 95 date updated
property :date_updated, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/dateUpdated") do |index|
  index.as :stored_searchable
end

# 96 description md5checksum
property :description_md5checksum, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionMd5checksum") do |index|
  index.as :stored_searchable
end

# 97 rights access
property :rights_access, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsAccess") do |index|
  index.as :stored_searchable
end

# NOTE(review): field number 98 is skipped in this sequence.

# 99 description hathi
property :description_hathi, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionHathi") do |index|
  index.as :stored_searchable
end

# 100 description restriction
property :description_restriction, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionRestriction") do |index|
  index.as :stored_searchable
end

# 101 identifier orcid
property :identifier_orcid, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierOrcid") do |index|
  index.as :stored_searchable
end

# 102 description filedescription
property :description_filedescription, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionFiledescription") do |index|
  index.as :stored_searchable
end

# 103 date open
property :date_open, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/dateOpen") do |index|
  index.as :stored_searchable
end

# 104 rights copyright
property :rights_copyright, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsCopyright") do |index|
  index.as :stored_searchable
end

# 105 provenance
property :provenance_none, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/provenanceNone") do |index|
  index.as :stored_searchable
end

# 106 rights license (single-valued)
property :rights_license, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsLicense"), multiple: false do |index|
  index.as :stored_searchable
end
-
-
4
# Free-text license value used when rights_license is "other" (single-valued).
property :rights_license_other, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/rightsLicenseOther"), multiple: false do |index|
  index.as :stored_searchable
end

# Geographic bounding box (georss box syntax; single-valued).
property :geo_location_box, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/georss/box"), multiple: false do |index|
  index.as :stored_searchable
end

# Place name coverage (single-valued).
property :geo_location_place, predicate: ::RDF::Vocab::DC11.coverage, multiple: false do |index|
  index.as :stored_searchable
end

# Additional license values (multi-valued, dcterms:license).
property :license_other, predicate: ::RDF::Vocab::DC.license, multiple: true do |index|
  index.as :stored_searchable
end

# 166 identifier slug
property :identifier_slug, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/identifierSlug") do |index|
  index.as :stored_searchable
end

# 167 description depositor - part of basic metadata :depositor
# property :depositor, predicate: ::RDF::URI.new("http://opaquenamespace.org/ns/descriptionDepositor") do |index|
#   index.as :stored_searchable
# end

# END These are ALL the metadata from Dspace

# accessor attribute used only to group the nested_geo fields and allow proper ordering in the forms
attr_accessor :geo_section

# accessor attribute used only to allow validators to check selected options depending on current_user role
attr_accessor :current_username

# Controlled-vocabulary properties; currently empty (based_near disabled).
class_attribute :controlled_properties
#self.controlled_properties = [:based_near]
self.controlled_properties = []

accepts_nested_attributes_for :based_near, allow_destroy: true, reject_if: proc { |a| a[:id].blank? }
# accepts_nested_attributes_for :nested_geo, :allow_destroy => true, :reject_if => :all_blank
# accepts_nested_attributes_for :nested_related_items, :allow_destroy => true, :reject_if => :all_blank
-
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  # Error type for DOI operations.
  # NOTE(review): not raised anywhere in this file; presumably used by callers.
  class DoiError < RuntimeError
  end

  # Adds DOI minting state and scheduling to a work model. The includer is
  # expected to provide #id, a read/write #doi attribute, #file_sets, #save
  # and #reload (i.e. an ActiveFedora-style work).
  module DoiBehavior

    # Global switch for DOI minting.
    DOI_MINTING_ENABLED = true
    # Sentinel value stored in #doi while a minting job is in flight.
    DOI_PENDING = 'doi_pending'
    # Minimum number of file sets a work needs before a DOI is minted
    # (only when enforce_minimum_file_count is true).
    DOI_MINIMUM_FILE_COUNT = 1

    # True when #doi is non-nil. Note the DOI_PENDING sentinel also counts
    # as "minted" here. Returns nil if reading #doi raises.
    # NOTE(review): the bare rescue silently converts any StandardError to nil.
    def doi_minted?
      !doi.nil?
    rescue
      nil
    end

    # Whether DOI minting is enabled at all (module-level flag).
    def doi_minting_enabled?
      ::Deepblue::DoiBehavior::DOI_MINTING_ENABLED
    end

    # True while a mint job has been scheduled but has not yet replaced the
    # sentinel with a real DOI.
    def doi_pending?
      doi == DOI_PENDING
    end

    # Schedule an asynchronous DOI mint for this work.
    #
    # Returns false without scheduling when a mint is already pending, a DOI
    # already exists, or (when enforced) the work has fewer than
    # DOI_MINIMUM_FILE_COUNT file sets. Otherwise persists the DOI_PENDING
    # sentinel, enqueues a DoiMintingJob and returns true.
    #
    # @param current_user [User, String, nil] requester; reduced to its email
    #   when it responds to #email
    # @param event_note [String] free-text note (currently only logged)
    # @param enforce_minimum_file_count [Boolean] skip the file-set minimum when false
    # @param job_delay [Numeric] delay passed through to the minting job
    #
    # NOTE(review): rescuing Exception is normally unsafe (it also catches
    # SignalException/SystemExit); kept as-is per the existing rubocop:disable,
    # and the rescue path returns nil rather than false.
    def doi_mint( current_user: nil, event_note: '', enforce_minimum_file_count: true, job_delay: 0 )
      Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           Deepblue::LoggingHelper.called_from,
                                           "work.id=#{id}",
                                           "doi=#{doi}",
                                           "current_user=#{current_user}",
                                           "event_note=#{event_note}",
                                           "enforce_minimum_file_count=#{enforce_minimum_file_count}",
                                           "job_delay=#{job_delay}" ]
      return false if doi_pending?
      return false if doi_minted?
      return false if enforce_minimum_file_count && file_sets.count < DOI_MINIMUM_FILE_COUNT
      # Persist the pending sentinel before enqueuing so concurrent callers
      # are rejected by the doi_pending? guard above.
      self.doi = DOI_PENDING
      self.save
      self.reload
      current_user = current_user.email if current_user.respond_to? :email
      Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                           Deepblue::LoggingHelper.called_from,
                                           "work.id=#{id}",
                                           "doi=#{doi}",
                                           "about to call DoiMintingJob" ]
      ::DoiMintingJob.perform_later( id, current_user: current_user, job_delay: job_delay )
      return true
    rescue Exception => e # rubocop:disable Lint/RescueException
      Rails.logger.error "DoiBehavior.doi_mint for curation_concern.id #{id} -- #{e.class}: #{e.message} at #{e.backtrace[0]}"
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  require_relative './abstract_event_behavior'

  # Error type for email event failures; shares the event error hierarchy.
  class EmailError < AbstractEventError
  end

  # Mixin that sends event-driven email notifications (create/destroy/
  # publish/unpublish/globus) for a work. The attribute lists and the
  # USE_BLANK_KEY_VALUES / IGNORE_BLANK_KEY_VALUES flags come from
  # AbstractEventBehavior; includers override the attributes_* hooks and the
  # for_email_* hooks to customize what gets reported.
  module EmailBehavior
    include AbstractEventBehavior

    # Attribute name lists for snapshots; empty by default, overridden by includers.
    def attributes_all_for_email
      %i[]
    end

    def attributes_brief_for_email
      %i[]
    end

    def attributes_standard_for_email
      %i[]
    end

    # Each attributes_for_email_rds_* returns a pair:
    # [attribute list, blank-value handling flag].
    def attributes_for_email_rds_create
      return attributes_standard_for_email, USE_BLANK_KEY_VALUES
    end

    def attributes_for_email_rds_destroy
      return attributes_standard_for_email, USE_BLANK_KEY_VALUES
    end

    def attributes_for_email_rds_globus
      return attributes_brief_for_email, IGNORE_BLANK_KEY_VALUES
    end

    def attributes_for_email_rds_publish
      return attributes_standard_for_email, USE_BLANK_KEY_VALUES
    end

    def attributes_for_email_rds_unpublish
      return attributes_standard_for_email, USE_BLANK_KEY_VALUES
    end

    def attributes_for_email_user_create
      attributes_standard_for_email
    end

    # Build the key/value hash reported in an email body: the requesting
    # user's email plus any event note, recipient note and extra pairs,
    # then the mapped work attributes.
    def email_attribute_values_for_snapshot( attributes:,
                                             current_user:,
                                             event:,
                                             event_note:,
                                             to_note:,
                                             ignore_blank_key_values:,
                                             **added_email_key_values )

      email_key_values = { user_email: for_email_user( current_user ) }
      email_key_values.merge!( event_note: event_note ) if event_note.present?
      email_key_values.merge!( to_note: to_note ) if to_note.present?
      email_key_values.merge!( added_email_key_values ) if added_email_key_values.present?
      email_key_values = map_email_attributes!( event: event,
                                                attributes: attributes,
                                                ignore_blank_key_values: ignore_blank_key_values,
                                                **email_key_values )
      email_key_values
    end

    # RDS notification address (nil on developer machines).
    def email_address_rds
      rv = EmailHelper.notification_email # will be nil on developer's machine
      rv
    end

    # Public Deep Blue contact address.
    def email_address_rds_deepblue
      rv = EmailHelper.contact_email
      rv
    end

    # Email address extracted from the given user.
    def email_address_user( current_user )
      rv = EmailHelper.user_email_from current_user
      rv
    end

    # Render an email body: optional message followed by one "label value"
    # line per key/value pair.
    def email_compose_body( message:, email_key_values: )
      body = StringIO.new
      body.puts message.to_s if message.present?
      email_key_values.each_pair do |key, value|
        label = for_email_label key
        value = for_email_value( key, value )
        body.puts "#{label}#{value}"
      end
      body.string
    end

    # Notify RDS that the work was created. When return_email_parameters is
    # true the composed parameters hash is returned (see
    # email_event_notification); when send_it is false nothing is sent.
    def email_rds_create( current_user:, event_note: '', return_email_parameters: false, send_it: true )
      attributes, ignore_blank_key_values = attributes_for_email_rds_create
      email_key_values = {}
      email_key_values = map_email_attributes!( event: EVENT_CREATE,
                                                attributes: attributes,
                                                ignore_blank_key_values: ignore_blank_key_values,
                                                **email_key_values )
      email_event_notification( to: email_address_rds,
                                to_note: 'RDS',
                                from: email_address_rds,
                                subject: Deepblue::EmailHelper.t( "hyrax.email.subject.work_created" ),
                                attributes: attributes,
                                current_user: current_user,
                                event: EVENT_CREATE,
                                event_note: event_note,
                                id: for_email_id,
                                ignore_blank_key_values: ignore_blank_key_values,
                                return_email_parameters: return_email_parameters,
                                send_it: send_it,
                                email_key_values: email_key_values )
    end

    # Notify RDS that the work was destroyed.
    def email_rds_destroy( current_user:, event_note: '' )
      attributes, ignore_blank_key_values = attributes_for_email_rds_destroy
      email_event_notification( to: email_address_rds,
                                to_note: 'RDS',
                                from: email_address_rds,
                                subject: Deepblue::EmailHelper.t( "hyrax.email.subject.work_deleted" ),
                                attributes: attributes,
                                current_user: current_user,
                                event: EVENT_DESTROY,
                                event_note: event_note,
                                id: for_email_id,
                                ignore_blank_key_values: ignore_blank_key_values )
    end

    # Notify RDS of a Globus-related event; event_note is included in the subject.
    def email_rds_globus( current_user:, event_note: )
      attributes, ignore_blank_key_values = attributes_for_email_rds_globus
      email_event_notification( to: email_address_rds,
                                to_note: 'RDS',
                                from: email_address_rds,
                                subject: for_email_subject( subject_rest: "Globus #{event_note}" ),
                                attributes: attributes,
                                current_user: current_user,
                                event: EVENT_GLOBUS,
                                event_note: event_note,
                                id: for_email_id,
                                ignore_blank_key_values: ignore_blank_key_values )
    end

    # Notify RDS that the work was published.
    def email_rds_publish( current_user:, event_note: '', message: '' )
      attributes, ignore_blank_key_values = attributes_for_email_rds_publish
      email_event_notification( to: email_address_rds,
                                to_note: 'RDS',
                                from: email_address_rds,
                                subject: Deepblue::EmailHelper.t( "hyrax.email.subject.work_published" ),
                                attributes: attributes,
                                current_user: current_user,
                                event: EVENT_PUBLISH,
                                event_note: event_note,
                                message: message,
                                id: for_email_id,
                                ignore_blank_key_values: ignore_blank_key_values )
    end

    # Notify RDS that the work was unpublished.
    def email_rds_unpublish( current_user:, event_note: '' )
      attributes, ignore_blank_key_values = attributes_for_email_rds_unpublish
      email_event_notification( to: email_address_rds,
                                to_note: 'RDS',
                                from: email_address_rds,
                                subject: Deepblue::EmailHelper.t( "hyrax.email.subject.work_unpublished" ),
                                attributes: attributes,
                                current_user: current_user,
                                event: EVENT_UNPUBLISH,
                                event_note: event_note,
                                id: for_email_id,
                                ignore_blank_key_values: ignore_blank_key_values )
    end

    # Notify the depositing user that the work was created.
    def email_user_create( current_user:, event_note: '' )
      email_event_notification( to: email_address_user( current_user ),
                                to_note: 'user',
                                from: email_address_rds,
                                subject: Deepblue::EmailHelper.t( "hyrax.email.subject.work_created" ),
                                attributes: attributes_for_email_user_create,
                                current_user: current_user,
                                event: EVENT_CREATE,
                                event_note: event_note,
                                id: for_email_id,
                                ignore_blank_key_values: false )
    end

    # NOTE(review): calls email_create, which is not defined in this module
    # (email_user_create was probably intended). The method is already marked
    # for deletion and appears uncovered.
    def email_create_to_user( current_user:, event_note: '' ) # TODO: delete this method
      email_create( current_user: current_user, event_note: event_note )
    end

    # The class logged in email records (hook; defaults to this object's class).
    def for_email_class
      for_email_object.class
    end

    # The id logged/reported (hook; defaults to this object's id).
    def for_email_id
      for_email_object.id
    end

    def for_email_ignore_empty_attributes
      true
    end

    # Label prefix for one body line, e.g. "title: ".
    def for_email_label( key )
      "#{key}: "
    end

    # The object attributes are read from (hook; defaults to self).
    def for_email_object
      self
    end

    # Placeholder location string for the object.
    # NOTE(review): this is a literal "route to <id>" string, not a URL —
    # presumably overridden by includers; confirm.
    def for_email_route
      "route to #{for_email_object.id}"
    end

    # Standard subject prefix for Deep Blue Data emails.
    def for_email_subject( subject_rest: )
      "DBD: #{subject_rest}"
    end

    # Normalize an attribute value for display: '' when blank; a single
    # element is unwrapped; multiple elements are joined with a key-specific
    # separator.
    def for_email_value( key, value )
      return '' if value.blank?
      if value.respond_to? :each
        value = if 1 == value.size
                  value[0]
                else
                  value.join( for_email_value_sep( key: key ) )
                end
      end
      value
    end

    # Separator used when joining multi-valued attributes (titles are joined
    # with a space, everything else with "; ").
    def for_email_value_sep( key: )
      rv = case key.to_s
           when 'title'
             ' '
           else
             '; '
           end
      rv
    end

    # Resolve the requesting user to an email string ('' when absent).
    def for_email_user( current_user )
      return '' if current_user.blank?
      return current_user if current_user.is_a? String
      EmailHelper.user_email_from( current_user )
    end

    # Copy the named attributes of for_email_object into email_key_values.
    # 'id', 'location'/'route' and 'date_created' get special handling; any
    # attribute the map_email_attributes_override! hook claims is skipped.
    # When ignore_blank_key_values is true, blank values are omitted.
    def map_email_attributes!( event:, attributes:, ignore_blank_key_values:, **email_key_values )
      prov_object = for_email_object
      if attributes.present?
        attributes.each do |attribute|
          next if map_email_attributes_override!( event: event,
                                                  attribute: attribute,
                                                  ignore_blank_key_values: ignore_blank_key_values,
                                                  email_key_values: email_key_values )
          value = case attribute.to_s
                  when 'id'
                    for_email_id
                  when 'location'
                    for_email_route
                  when 'route'
                    for_email_route
                  when 'date_created'
                    prov_object[:date_created].blank? ? '' : prov_object[:date_created]
                  else
                    prov_object[attribute]
                  end
          value = '' if value.nil?
          if ignore_blank_key_values
            email_key_values[attribute] = value if value.present?
          else
            email_key_values[attribute] = value
          end
        end
      end
      email_key_values
    end

    # override this if there is anything extra to add
    # return true if handled
    def map_email_attributes_override!( event:, # rubocop:disable Lint/UnusedMethodArgument
                                        attribute:, # rubocop:disable Lint/UnusedMethodArgument
                                        ignore_blank_key_values:, # rubocop:disable Lint/UnusedMethodArgument
                                        email_key_values: ) # rubocop:disable Lint/UnusedMethodArgument

      handled = false
      return handled
    end

    protected

    # Compose, (optionally) send and log one event email. When
    # email_key_values is blank it is built via
    # email_attribute_values_for_snapshot. Also records the event in the
    # event-attributes cache (from AbstractEventBehavior). Returns nil unless
    # return_email_parameters is true, in which case the composed parameter
    # hash is returned (used by callers that defer sending).
    def email_event_notification( to:,
                                  to_note:,
                                  from:,
                                  subject:,
                                  attributes:,
                                  current_user:,
                                  event:,
                                  event_note:,
                                  message: '',
                                  ignore_blank_key_values:,
                                  id:,
                                  return_email_parameters: false,
                                  send_it: true,
                                  email_key_values: nil )

      if email_key_values.blank?
        email_key_values = email_attribute_values_for_snapshot( attributes: attributes,
                                                                current_user: current_user,
                                                                event: event,
                                                                event_note: event_note,
                                                                to_note: to_note,
                                                                ignore_blank_key_values: ignore_blank_key_values )
      end
      event_attributes_cache_write( event: event, id: id, behavior: :EmailBehavior )
      body = email_compose_body( message: message, email_key_values: email_key_values )
      EmailHelper.send_email( to: to, from: from, subject: subject, body: body ) if send_it
      class_name = for_email_class.name
      EmailHelper.log( class_name: class_name,
                       current_user: current_user,
                       event: event,
                       event_note: event_note,
                       id: id,
                       to: to,
                       from: from,
                       subject: subject,
                       message: message,
                       body: body,
                       **email_key_values ) if send_it
      return nil unless return_email_parameters
      parameters = { to: to,
                     to_note: to_note,
                     from: from,
                     subject: subject,
                     message: message,
                     body: body,
                     current_user: current_user,
                     event: event,
                     event_note: event_note,
                     id: id,
                     email_key_values: email_key_values }
      return parameters
    end

  end

end
-
# don't freeze, it causes errors
-
-
# require File.join(Gem::Specification.find_by_name("hydra-access-controls").full_gem_path, "app/models/concerns/hydra/access_controls/embargoable_behavior.rb")
-
-
1
module Deepblue
  # Monkey patch of the Hydra::AccessControls Embargoable behavior: replaces
  # #deactivate_embargo! and #deactivate_lease! so the work's post-embargo /
  # post-lease visibility survives deactivation, and (for embargoes) a
  # provenance entry is recorded.
  module EmbargoableBehavior
    extend ActiveSupport::Concern

    included do
      # Keep handles to the originals being monkey patched.
      alias_method :monkey_deactivate_embargo!, :deactivate_embargo!
      alias_method :monkey_deactivate_lease!, :deactivate_lease!
    end

    # Deactivates the embargo and logs a message to the embargo object.
    # Marks this record as dirty so that it will get reindexed.
    # No-op when there is no embargo.
    # NOTE(review): the current_user parameter is accepted but unused;
    # provenance is always attributed to the system user.
    def deactivate_embargo!( current_user: nil )
      return if embargo.nil?
      # embargo.deactivate! wipes out work.visibility_after_embargo before it
      # can be applied, so capture it first and apply it manually afterwards.
      vis_after = visibility_after_embargo
      vis_after = visibility_after_embargo_default if vis_after.nil?
      provenance_unembargo( current_user: Deepblue::ProvenanceHelper.system_as_current_user,
                            embargo_visibility: visibility,
                            embargo_visibility_after: vis_after )
      embargo.deactivate!
      self.visibility = vis_after
      visibility_will_change!
    end

    # Deactivates the lease, preserving the post-lease visibility.
    # No-op when there is no lease.
    # NOTE(review): unlike deactivate_embargo!, this takes no current_user:
    # keyword and records no provenance — confirm the asymmetry is intended.
    def deactivate_lease!
      return if lease.nil?
      # lease.deactivate! wipes out work.visibility_after_lease before it can
      # be applied, so capture it first and apply it manually afterwards.
      vis_after = visibility_after_lease
      vis_after = visibility_after_lease_default if vis_after.nil?
      lease.deactivate!
      self.visibility = vis_after
      visibility_will_change!
    end

    # Set the current visibility to match what is described in the embargo,
    # filling in defaults for any unset during/after values. No-op when there
    # is no embargo release date.
    def embargo_visibility!
      return unless embargo_release_date
      if under_embargo?
        self.visibility_during_embargo = visibility_during_embargo ? visibility_during_embargo : visibility_during_embargo_default
        self.visibility_after_embargo = visibility_after_embargo ? visibility_after_embargo : visibility_after_embargo_default
        self.visibility = visibility_during_embargo
      else
        self.visibility = visibility_after_embargo ? visibility_after_embargo : visibility_after_embargo_default
      end
    end

    # Set the current visibility to match what is described in the lease.
    def lease_visibility!
      if lease_expiration_date
        if active_lease?
          self.visibility_during_lease = visibility_during_lease ? visibility_during_lease : visibility_during_lease_default
          self.visibility_after_lease = visibility_after_lease ? visibility_after_lease : visibility_after_lease_default
          self.visibility = visibility_during_lease
        else
          self.visibility = visibility_after_lease ? visibility_after_lease : visibility_after_lease_default
        end
      end
    end

    # Default post-embargo visibility, from application config.
    def visibility_after_embargo_default
      ::DeepBlueDocs::Application.config.embargo_visibility_after_default_status
    end

    # Leases fall back to private visibility once expired.
    def visibility_after_lease_default
      ::Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
    end

    # Default during-embargo visibility, from application config.
    def visibility_during_embargo_default
      ::DeepBlueDocs::Application.config.embargo_visibility_during_default_status
    end

    # Works under an active lease default to authenticated-only visibility.
    def visibility_during_lease_default
      ::Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_AUTHENTICATED
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require_relative '../../../services/deepblue/virus_scan_service'
-
-
1
module FileSetBehavior
-
1
extend ActiveSupport::Concern
-
-
1
include ::Deepblue::VirusScanService
-
-
1
included do

  after_initialize :set_deepblue_file_set_defaults

  # Hook for new-record defaults. All defaults are currently commented out,
  # so for new records this is effectively a no-op placeholder.
  def set_deepblue_file_set_defaults
    return unless new_record?
    # self.file_size = 0
    # self.visibility = 'open'
  end

end
-
-
# versioning
-
-
1
# All stored versions of this file set's original file.
#
# @return [Array] the full version list, or [] when there is no original
#   file or the file has no version graph.
#
# Fix: the original ended with a redundant local assignment
# (`rv = rv.all`) whose value was only used implicitly as the return value.
def versions
  ofile = original_file
  return [] if ofile.nil?
  version_graph = ofile.versions
  return [] if version_graph.nil?
  version_graph.all
end
-
-
1
# The most recent version of the original file (nil when there are none).
def latest_version
  all_versions = versions
  all_versions.last
end
-
-
1
# Creation timestamp of the most recent version (nil when no versions exist).
def latest_version_create_datetime
  version_datetime( latest_version )
end
-
-
1
# Number of stored versions of the original file; 0 when there are none.
def version_count
  version_list = versions
  version_list.nil? ? 0 : version_list.count
end
-
-
1
# Parse a version's creation time.
#
# Returns nil for a nil version, '' when the version has no created value,
# otherwise the parsed DateTime.
def version_datetime( version )
  return nil if version.nil?
  created = version.created
  return '' if created.blank?
  DateTime.parse created
end
-
-
1
# Human-readable creation timestamp for the given version.
def version_datetime_display( version )
  DeepblueHelper.display_timestamp( version_datetime( version ) )
end
-
-
1
# Push this file set's size into the parent work's running total.
# No-op when the file set has no parent.
def update_parent
  parent.total_file_size_add_file_set!( self ) unless parent.nil?
end
-
-
# virus scanning
-
-
1
# Run a virus scan on the original file when needed, recording the result
# (including a "skipped: too big" status) via virus_scan_status_update.
def virus_scan
  LoggingHelper.bold_debug [ LoggingHelper.here, LoggingHelper.called_from, "original_file = #{original_file}" ]
  # check file size here to avoid making a temp copy of the file in VirusCheckerService
  needed = virus_scan_needed?
  if needed && virus_scan_file_too_big?
    virus_scan_status_update( scan_result: VIRUS_SCAN_SKIPPED_TOO_BIG )
  elsif needed
    # TODO: figure out how to retry the virus scan as this only works for ( original_file && original_file.new_record? )
    scan_result = Hydra::Works::VirusCheckerService.file_has_virus? original_file
    virus_scan_status_update( scan_result: scan_result, previous_scan_result: virus_scan_status )
  else
    logger.info "Virus scan not needed." # TODO: improve message
  end
end
-
-
1
# Whether the file exceeds the configured virus-scan size cap.
# Returns false when no size can be determined.
def virus_scan_file_too_big?
  size = virus_scan_file_size
  return false if size.blank?
  size.to_i > DeepBlueDocs::Application.config.virus_scan_max_file_size
end
-
-
# Best-effort size of the file to be scanned: prefer the stored
# file_size attribute (via file_size_value), fall back to the original
# file's reported size, else 0.
def virus_scan_file_size
  return file_size_value unless file_size.blank?
  original = original_file
  return 0 if original.nil?
  original.respond_to?( :size ) ? original.size : 0
end
-
-
# Whether a virus scan should be run now.  Currently hard-wired to
# true; the commented-out logic below documents the intended
# status-based retry policy, retained for future reinstatement.
def virus_scan_needed?
  # really, it's always needed.
  true
  # LoggingHelper.bold_debug [ LoggingHelper.here, LoggingHelper.called_from ]
  # return true if original_file && original_file.new_record?
  # return false unless DeepBlueDocs::Application.config.virus_scan_retry
  # scan_status = virus_scan_status
  # return true if scan_status.blank?
  # case scan_status
  # when VIRUS_SCAN_NOT_VIRUS
  #   false
  # when VIRUS_SCAN_VIRUS
  #   false
  # when VIRUS_SCAN_SKIPPED_TOO_BIG
  #   false
  # when VIRUS_SCAN_SKIPPED_SERVICE_UNAVAILABLE
  #   DeepBlueDocs::Application.config.virus_scan_retry_on_service_unavailable
  # when VIRUS_SCAN_ERROR
  #   DeepBlueDocs::Application.config.virus_scan_retry_on_error
  # when VIRUS_SCAN_UNKNOWN
  #   DeepBlueDocs::Application.config.virus_scan_retry_on_unknown
  # else
  #   true
  # end
end
-
-
# A scan may be retried only once the original file has been persisted
# (i.e. it is not a new, unsaved record).
def virus_scan_retry?
  !original_file&.new_record?
end
-
-
# Persist the outcome of a virus scan (service name, status, and a
# fresh timestamp) on this file set, then write a provenance log
# entry.  Returns scan_result.
# NOTE(review): attributes are written via self['...'] because plain
# attribute assignment did not persist — see the comments below.
def virus_scan_status_update( scan_result:, previous_scan_result: nil )
  LoggingHelper.bold_debug [ LoggingHelper.here,
                             LoggingHelper.called_from,
                             "scan_result=#{scan_result}",
                             "previous_scan_result=#{previous_scan_result}" ]
  # Oops. Really don't want to consider previous result as we want the new timestamp
  # return scan_result if previous_scan_result.present? && scan_result == previous_scan_result
  # for some reason, this does not save the attributes
  # virus_scan_service = virus_scan_service_name
  # virus_scan_status = scan_result
  # virus_scan_status_date = virus_scan_timestamp_now
  # but this does save the attributes
  self['virus_scan_service'] = virus_scan_service_name
  self['virus_scan_status'] = scan_result
  self['virus_scan_status_date'] = virus_scan_timestamp_now
  save! # ( validate: false )
  provenance_virus_scan( scan_result: scan_result ) # if respond_to? :provenance_virus_scan
  return scan_result
end
-
-
end
-
-
end
-
# frozen_string_literal: true

module Deepblue

  # ActiveFedora property declarations for Deepblue-specific FileSet
  # metadata: curation notes, file size, prior identifiers, and virus
  # scan bookkeeping fields.
  module FileSetMetadata

    extend ActiveSupport::Concern

    included do

      property :curation_notes_admin, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_admin'), multiple: true do |index|
        index.type :text
        index.as :stored_searchable
      end

      # single-valued "ordered" companion to the multi-valued notes
      property :curation_notes_admin_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_admin_ordered'), multiple: false do |index|
        index.type :text
        index.as :stored_searchable
      end

      property :curation_notes_user, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_user'), multiple: true do |index|
        index.type :text
        index.as :stored_searchable
      end

      property :curation_notes_user_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_user_ordered'), multiple: false do |index|
        index.type :text
        index.as :stored_searchable
      end

      # property :file_size, predicate: ::RDF::Vocab::DC.SizeOrDuration, multiple: false
      property :file_size, predicate: ::RDF::Vocab::DC.SizeOrDuration, multiple: true

      property :prior_identifier, predicate: ActiveFedora::RDF::Fcrepo::Model.altIds, multiple: true do |index|
        index.as :stored_searchable
      end

      # TODO: can't use the same predicate twice
      # property :total_file_size_human_readable, predicate: ::RDF::Vocab::DC.SizeOrDuration, multiple: false

      # Virus scan bookkeeping; written by virus_scan_status_update.
      property :virus_scan_service, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#virus_scan_service'), multiple: false do |index|
        index.type :text
        index.as :stored_searchable
      end

      property :virus_scan_status, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#virus_scan_status'), multiple: false do |index|
        index.type :text
        index.as :stored_searchable
      end

      property :virus_scan_status_date, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#virus_scan_status_date'), multiple: false do |index|
        index.type :text
        index.as :stored_searchable
      end

    end

  end
end
-
# frozen_string_literal: true

module Deepblue

  # Raised when a metadata report cannot be produced (e.g. no output
  # target was supplied).
  class MetadataError < RuntimeError
  end

  # Plain-text metadata reporting for a curation concern.  Including
  # classes override the for_metadata_* accessors and metadata_keys_*
  # lists to control report content.
  module MetadataBehavior

    METADATA_FIELD_SEP = '; '
    METADATA_REPORT_DEFAULT_DEPTH = 2
    METADATA_REPORT_DEFAULT_FILENAME_POST = '_metadata_report'
    METADATA_REPORT_DEFAULT_FILENAME_EXT = '.txt'

    # Identifier used for the 'id' key and in report filenames.
    def for_metadata_id
      self.id
    end

    # Value reported for the 'location' and 'route' keys.
    def for_metadata_route
      "route to #{id}"
    end

    # Title used in the report header and for the 'title' key.
    def for_metadata_title
      self.title
    end

    # Override: the complete metadata key list for this concern.
    def metadata_keys_all
      %i[]
    end

    # Override: keys included in a generated report.
    def metadata_keys_report
      %i[]
    end

    # Override: abbreviated key list.
    def metadata_keys_brief
      %i[]
    end

    # Build a { key => value } hash for the requested metadata keys.
    # Special keys (id/location/route/title/visibility) are computed;
    # all other keys are read directly from self.  nil becomes '';
    # blank values are skipped when ignore_blank_values is true.
    def metadata_hash( metadata_keys:, ignore_blank_values:, **key_values )
      return {} if metadata_keys.blank?
      key_values = {} if key_values.nil?
      metadata_keys.each do |key|
        next if metadata_hash_override( key: key, ignore_blank_values: ignore_blank_values, key_values: key_values )
        value = case key.to_s
                when 'id'
                  for_metadata_id
                when 'location'
                  for_metadata_route
                when 'route'
                  for_metadata_route
                when 'title'
                  for_metadata_title
                when 'visibility'
                  metadata_report_visibility_value( self.visibility )
                else
                  self[key]
                end
        value = '' if value.nil?
        if ignore_blank_values
          key_values[key] = value if value.present?
        else
          key_values[key] = value
        end
      end
      key_values
    end

    # Extension hook: return true when the key was fully handled and
    # the default mapping should be skipped.
    def metadata_hash_override( key:, ignore_blank_values:, key_values: ) # rubocop:disable Lint/UnusedMethodArgument
      handled = false
      return handled
    end

    # Write the metadata report.  With dir:, a report file is created
    # under that directory and its path returned; with out:, the report
    # is written to the given IO and nil is returned.
    # @raise [MetadataError] when neither dir: nor out: is supplied.
    def metadata_report( dir: nil,
                         out: nil,
                         depth: METADATA_REPORT_DEFAULT_DEPTH,
                         filename_pre: '',
                         filename_post: METADATA_REPORT_DEFAULT_FILENAME_POST,
                         filename_ext: METADATA_REPORT_DEFAULT_FILENAME_EXT )

      raise MetadataError, "Either dir: or out: must be specified." if dir.nil? && out.nil?
      if out.nil?
        target_file = metadata_report_filename( pathname_dir: dir,
                                                filename_pre: filename_pre,
                                                filename_post: filename_post,
                                                filename_ext: filename_ext )
        # Fix: use File.open, not Kernel#open — Kernel#open treats a
        # leading '|' in the path as a command to spawn, which is never
        # wanted for a report file.
        File.open( target_file, 'w' ) do |out2|
          metadata_report( out: out2, depth: depth )
        end
        return target_file
      else
        report_title = metadata_report_title( depth: depth )
        out.puts report_title
        ignore_blank_values, metadata_keys = metadata_report_keys
        metadata = metadata_hash( metadata_keys: metadata_keys, ignore_blank_values: ignore_blank_values )
        metadata_report_to( out: out, metadata_hash: metadata, depth: depth )
        # Don't include metadata reports for contained objects, such as file_sets
        # contained_objects = metadata_report_contained_objects
        # if contained_objects.count.positive?
        #   contained_objects.each do |obj|
        #     next unless obj.respond_to? :metadata_report
        #     out.puts
        #     obj.metadata_report( out: out, depth: depth + 1 )
        #   end
        # end
        return nil
      end
    end

    # Override: child objects whose reports could be appended
    # (currently unused — see the commented block above).
    def metadata_report_contained_objects
      []
    end

    # Pathname for a report file: <pre><id><post><ext> under
    # pathname_dir.
    def metadata_report_filename( pathname_dir:,
                                  filename_pre:,
                                  filename_post: METADATA_REPORT_DEFAULT_FILENAME_POST,
                                  filename_ext: METADATA_REPORT_DEFAULT_FILENAME_EXT )

      pathname_dir.join "#{filename_pre}#{for_metadata_id}#{filename_post}#{filename_ext}"
    end

    # [blank-handling flag, report key list] consumed by
    # metadata_report.
    def metadata_report_keys
      return AbstractEventBehavior::IGNORE_BLANK_KEY_VALUES, metadata_keys_report
    end

    # Display label for a metadata key (e.g. 'Title: ').  The override
    # hook wins when it returns a non-blank label.
    def metadata_report_label( metadata_key:, metadata_value: )
      return nil if metadata_key.blank?
      label = metadata_report_label_override(metadata_key: metadata_key, metadata_value: metadata_value )
      return label if label.present?
      label = case metadata_key.to_s
              when 'id'
                'ID: '
              when 'location'
                'Location: '
              when 'route'
                'Route: '
              when 'title'
                'Title: '
              when 'visibility'
                'Visibility: '
              else
                "#{metadata_key.to_s.titlecase}: "
              end
      label
    end

    # Extension hook: return a label to override the default, else nil.
    def metadata_report_label_override( metadata_key:, metadata_value: ) # rubocop:disable Lint/UnusedMethodArgument
      label = nil
      return label
    end

    # Header line for the report, decorated with '=' markers when
    # depth is positive.  Multi-valued titles are joined first.
    def metadata_report_title( depth:,
                               header_begin: '=',
                               header_end: '=' )

      report_title = for_metadata_title
      report_title = report_title.join( metadata_report_title_field_sep ) if report_title.respond_to? :join
      if depth.positive?
        "#{header_begin * depth} #{metadata_report_title_pre}#{report_title} #{header_end * depth}"
      else
        "#{metadata_report_title_pre}#{report_title}"
      end
    end

    # Override: prefix prepended to the report title.
    def metadata_report_title_pre
      ''
    end

    # Override: separator used to join multi-valued titles.
    def metadata_report_title_field_sep
      ' '
    end

    # Write every metadata key/value pair to out.
    def metadata_report_to( out:, metadata_hash:, depth: 0 )
      return if out.nil?
      metadata_hash.each_pair do |key, value|
        metadata_report_item_to( out: out, key: key, value: value, depth: depth )
      end
    end

    # Write a single labeled metadata item to out.
    def metadata_report_item_to( out:, key:, value:, depth: ) # rubocop:disable Lint/UnusedMethodArgument
      label = metadata_report_label(metadata_key: key, metadata_value: value )
      MetadataHelper.report_item( out, label, value )
    end

    # Map machine visibility values to reader-friendly terms; unknown
    # values pass through unchanged.
    def metadata_report_visibility_value( visibility )
      case visibility
      when Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
        'published'
      when Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
        'private'
      else
        visibility
      end
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
require_relative './abstract_event_behavior'

# Raised for invalid provenance-logging requests (e.g. an unknown
# event name passed to provenance_log_for_event).
class ProvenanceLogError < AbstractEventError
end
-
-
1
module ProvenanceBehavior
-
1
include AbstractEventBehavior
-
-
# Override: full attribute list captured in provenance entries.
def attributes_all_for_provenance
  %i[]
end

# Override: abbreviated attribute list for lightweight events.
def attributes_brief_for_provenance
  %i[]
end

# Attributes captured for virus-scan events (brief set by default).
def attributes_virus_for_provenance
  attributes_brief_for_provenance
end

# Attributes captured for update events (full set by default).
def attributes_update_for_provenance
  attributes_all_for_provenance
end
-
-
# Each attributes_for_provenance_<event> method returns a two-element
# tuple — [attribute list, blank-value handling flag] — consumed by the
# corresponding provenance_<event> logging method via destructuring.

def attributes_for_provenance_add
  return attributes_brief_for_provenance, IGNORE_BLANK_KEY_VALUES
end

def attributes_for_provenance_characterize
  return attributes_brief_for_provenance, IGNORE_BLANK_KEY_VALUES
end

def attributes_for_provenance_create
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_create_derivative
  return attributes_brief_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_destroy
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_embargo
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_fixity_check
  return attributes_brief_for_provenance, IGNORE_BLANK_KEY_VALUES
end

def attributes_for_provenance_ingest
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_migrate
  return attributes_brief_for_provenance, IGNORE_BLANK_KEY_VALUES
end

def attributes_for_provenance_mint_doi
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_publish
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_tombstone
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_unembargo
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_unpublish
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_update
  return attributes_update_for_provenance, IGNORE_BLANK_KEY_VALUES
end

def attributes_for_provenance_update_version
  return attributes_update_for_provenance, IGNORE_BLANK_KEY_VALUES
end

def attributes_for_provenance_upload
  return attributes_all_for_provenance, USE_BLANK_KEY_VALUES
end

def attributes_for_provenance_virus_scan
  return attributes_virus_for_provenance, IGNORE_BLANK_KEY_VALUES
end

def attributes_for_provenance_workflow
  return attributes_brief_for_provenance, IGNORE_BLANK_KEY_VALUES
end
-
-
# Read the cached attribute snapshot for an event on the given id.
def attributes_cache_fetch( event:, id: for_provenance_id )
  Rails.cache.fetch( attributes_cache_key( event: event, id: id ) )
end

# Cache key for attribute snapshots: "<id>.<event>".
def attributes_cache_key( event:, id: )
  "#{id}.#{event}"
end

# Store an attribute snapshot for an event on the given id.
def attributes_cache_write( event:, id: for_provenance_id, attributes: )
  Rails.cache.write( attributes_cache_key( event: event, id: id ), attributes )
end

# True when a provenance-event marker is cached for id/event.
def for_provenance_event_cache_exist?( event:, id: for_provenance_id )
  Rails.cache.exist?( for_provenance_event_cache_key( event: event, id: id ) )
end

# Read the cached provenance-event marker for id/event.
def for_provenance_event_cache_fetch( event:, id: for_provenance_id )
  Rails.cache.fetch( for_provenance_event_cache_key( event: event, id: id ) )
end

# Cache key for provenance-event markers: "<id>.<event>.provenance".
def for_provenance_event_cache_key( event:, id: )
  "#{id}.#{event}.provenance"
end

# Record that a provenance event fired; markers expire after 12 hours.
def for_provenance_event_cache_write( event:, id: for_provenance_id, value: DateTime.now )
  key = for_provenance_event_cache_key( event: event, id: id )
  Rails.cache.write( key, value, expires_in: 12.hours )
end
-
-
# Class of the object provenance is recorded against.
def for_provenance_class
  for_provenance_object.class
end

# Identifier used in provenance entries and cache keys.
def for_provenance_id
  for_provenance_object.id
end

# Whether blank attribute values should be omitted from provenance.
def for_provenance_ignore_empty_attributes
  true
end

# The object provenance is recorded against; defaults to self.
def for_provenance_object
  self
end

# Routing/location string recorded for 'location'/'route' attributes.
def for_provenance_route
  "route to #{for_provenance_object.id}"
end

# Resolve a user reference to an email string: '' when blank, the
# value itself when it is already a String, otherwise look it up via
# EmailHelper.
def for_provenance_user( current_user )
  return '' if current_user.blank?
  return current_user if current_user.is_a? String
  EmailHelper.user_email_from( current_user )
end
-
-
# Resolve each named attribute to a value from the provenance object
# and merge it into prov_key_values (mutated and returned).  Special
# names ('id', 'location', 'route', 'date_created') are computed;
# unknown attributes are recorded as 'MISSING_ATTRIBUTE'.  nil becomes
# ''; blank values are skipped when ignore_blank_key_values is true.
def map_provenance_attributes!( event:, attributes:, ignore_blank_key_values:, **prov_key_values )
  prov_object = for_provenance_object
  # prov_object_class = prov_object.class.name
  if attributes.present?
    attributes.each do |attribute|
      # per-attribute extension hook; true means fully handled
      next if map_provenance_attributes_override!( event: event,
                                                   attribute: attribute,
                                                   ignore_blank_key_values: ignore_blank_key_values,
                                                   prov_key_values: prov_key_values )
      value = case attribute.to_s
              when 'id'
                for_provenance_id
              when 'location'
                for_provenance_route
              when 'route'
                for_provenance_route
              when 'date_created'
                prov_object[:date_created].blank? ? '' : prov_object[:date_created]
              else
                if prov_object.has_attribute? attribute
                  prov_object[attribute]
                else
                  'MISSING_ATTRIBUTE'
                end
                # begin
                #   prov_object[attribute]
                # rescue Exception => e
                #   puts "attribute='#{attribute}' #{e}"
                #   raise e
                # end
              end
      value = '' if value.nil?
      if ignore_blank_key_values
        prov_key_values[attribute] = value if value.present?
      else
        prov_key_values[attribute] = value
      end
    end
  end
  prov_key_values
end
-
-
# Snapshot the current update-relevant attribute values and keep only
# those that differ from the pre-update snapshot, so the provenance
# entry records just the changed fields.  Returns nil when no
# pre-update snapshot was taken.
def map_provenance_attributes_for_update( current_user, event_note, provenance_attribute_values_before_update )
  return nil if provenance_attribute_values_before_update.blank?
  attributes, ignore_blank_key_values = attributes_for_provenance_update
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: EVENT_UPDATE,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values )
  # only the changed values
  prov_key_values.reject! { |attribute, value| value == provenance_attribute_values_before_update[attribute] }
  prov_key_values
end
-
-
# override this if there is anything extra to add
# Extension hook invoked once per attribute before the default
# mapping; return true when the attribute was fully handled (the
# default mapping is then skipped).
def map_provenance_attributes_override!( event:, # rubocop:disable Lint/UnusedMethodArgument
                                         attribute:, # rubocop:disable Lint/UnusedMethodArgument
                                         ignore_blank_key_values:, # rubocop:disable Lint/UnusedMethodArgument
                                         prov_key_values: ) # rubocop:disable Lint/UnusedMethodArgument

  handled = false
  return handled
end
-
-
# Build the provenance key/value payload for one event: start with the
# standard logger fields (user email, event note, extra values), then
# merge in the mapped attribute values.
def provenance_attribute_values_for_snapshot( attributes:,
                                              current_user:,
                                              event:,
                                              event_note:,
                                              ignore_blank_key_values:,
                                              **added_prov_key_values )

  prov_key_values = ProvenanceHelper.logger_initialize_key_values(user_email: for_provenance_user(current_user ),
                                                                  event_note: event_note,
                                                                  **added_prov_key_values )
  prov_key_values = map_provenance_attributes!( event: event,
                                                attributes: attributes,
                                                ignore_blank_key_values: ignore_blank_key_values,
                                                **prov_key_values )
  prov_key_values
end
-
-
# Snapshot of update-relevant attribute values, always including blank
# values (ignore_blank_key_values: false) so later diffs see every
# field.
def provenance_attribute_values_for_update( current_user:, event_note: '' )
  attributes, _ignore_blank_key_values = attributes_for_provenance_update
  provenance_attribute_values_for_snapshot( attributes: attributes,
                                            current_user: current_user,
                                            event: EVENT_UPDATE,
                                            event_note: event_note,
                                            ignore_blank_key_values: false )
end
-
-
# Log a CHARACTERIZE provenance event, recording the calling class.
# The snapshot uses the 'add' attribute set while the logged entry
# uses the 'characterize' set.
def provenance_characterize( current_user:, event_note: '', calling_class:, **added_prov_key_values )
  event = EVENT_CHARACTERIZE
  attributes, ignore_blank_key_values = attributes_for_provenance_add
  added_prov_key_values = { calling_class: calling_class }.merge added_prov_key_values
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  attributes, ignore_blank_key_values = attributes_for_provenance_characterize
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Log a CHILD_ADD provenance event for the given child id.
def provenance_child_add( current_user:, child_id:, event_note: '', **added_prov_key_values )
  event = EVENT_CHILD_ADD
  added_prov_key_values = { child_id: child_id }.merge added_prov_key_values
  attributes, ignore_blank_key_values = attributes_for_provenance_add
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  provenance_log_event( attributes: nil,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: true,
                        prov_key_values: prov_key_values )
end
-
-
# Log a CHILD_REMOVE provenance event for the given child id.
def provenance_child_remove( current_user:, child_id:, event_note: '', **added_prov_key_values )
  event = EVENT_CHILD_REMOVE
  added_prov_key_values = { child_id: child_id }.merge added_prov_key_values
  attributes, ignore_blank_key_values = attributes_for_provenance_add
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  provenance_log_event( attributes: nil,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: true,
                        prov_key_values: prov_key_values )
end
-
-
# Log a CREATE provenance event for this object.
def provenance_create( current_user:, event_note: '' )
  attrs, blanks_flag = attributes_for_provenance_create
  provenance_log_event( attributes: attrs,
                        current_user: current_user,
                        event: EVENT_CREATE,
                        event_note: event_note,
                        ignore_blank_key_values: blanks_flag )
end
-
-
# Log a CREATE_DERIVATIVE provenance event, recording the calling
# class.  The snapshot uses the 'add' attribute set while the logged
# entry uses the 'create_derivative' set.
def provenance_create_derivative( current_user:, event_note: '', calling_class:, **added_prov_key_values )
  event = EVENT_CREATE_DERIVATIVE
  attributes, ignore_blank_key_values = attributes_for_provenance_add
  added_prov_key_values = { calling_class: calling_class }.merge added_prov_key_values
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  attributes, ignore_blank_key_values = attributes_for_provenance_create_derivative
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Log a DESTROY provenance event.  Unless redundant events are allowed
# by configuration, skip when a destroy was already recorded recently
# (tracked via the provenance event cache).
def provenance_destroy( current_user:, event_note: '' )
  unless DeepBlueDocs::Application.config.provenance_log_redundant_events
    return if for_provenance_event_cache_exist?( event: EVENT_DESTROY )
  end
  attributes, ignore_blank_key_values = attributes_for_provenance_destroy
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: EVENT_DESTROY,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values )
end
-
-
# Log an EMBARGO provenance event; any embargo_values are merged into
# the snapshot payload.
def provenance_embargo( current_user:, event_note: '', **embargo_values )
  attributes, ignore_blank_key_values = attributes_for_provenance_embargo
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: EVENT_EMBARGO,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **embargo_values )
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: EVENT_EMBARGO,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Log a FIXITY_CHECK provenance event with the check status and note.
# The event note defaults to the status so the log line is
# self-describing.
def provenance_fixity_check( current_user:,
                             event_note: '',
                             fixity_check_status:,
                             fixity_check_note:,
                             **added_prov_key_values )
  event = EVENT_FIXITY_CHECK
  attributes, ignore_blank_key_values = attributes_for_provenance_fixity_check
  added_prov_key_values = { fixity_check_status: fixity_check_status,
                            fixity_check_note: fixity_check_note }.merge added_prov_key_values
  event_note = fixity_check_status if event_note.blank?
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  provenance_log_event( attributes: nil,
                        event: event,
                        current_user: current_user,
                        event_note: event_note,
                        ignore_blank_key_values: false,
                        prov_key_values: prov_key_values )
end
-
-
# Log an INGEST provenance event with the ingest bookkeeping fields
# (calling class, ingest id, ingester, and timestamp).
def provenance_ingest( current_user:,
                       event_note: '',
                       calling_class:,
                       ingest_id:,
                       ingester:,
                       ingest_timestamp:,
                       **added_prov_key_values )
  event = EVENT_INGEST
  attributes, ignore_blank_key_values = attributes_for_provenance_ingest
  added_prov_key_values = { calling_class: calling_class,
                            ingest_id: ingest_id,
                            ingester: ingester,
                            ingest_timestamp: ingest_timestamp }.merge added_prov_key_values
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  provenance_log_event( attributes: nil,
                        event: event,
                        current_user: current_user,
                        event_note: event_note,
                        ignore_blank_key_values: false,
                        prov_key_values: prov_key_values )
end
-
-
# Log an arbitrary provenance event after validating the event name.
# @raise [ProvenanceLogError] when event is not a recognized event.
def provenance_log_for_event( attributes: [],
                              current_user:,
                              event:,
                              event_note: '',
                              ignore_blank_key_values: false,
                              **prov_key_values )

  # Bug fix: the original wrote `raise ProvenanceLogError( "..." )`
  # (calling a nonexistent method) and `EVENTS.contains( event )`
  # (Ruby collections have include?, not contains), so the guard
  # raised NoMethodError on every call instead of validating.
  raise ProvenanceLogError, "Unknown provenance log event: #{event}" unless EVENTS.include?( event )
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Log a MIGRATE provenance event; migrate_direction (and parent_id,
# when present) are recorded, and the event note defaults to the
# direction.
def provenance_migrate( current_user:, event_note: '', migrate_direction:, parent_id: nil, **added_prov_key_values )
  event = EVENT_MIGRATE
  attributes, ignore_blank_key_values = attributes_for_provenance_migrate
  added_prov_key_values = if parent_id.present?
                            { migrate_direction: migrate_direction, parent_id: parent_id }.merge added_prov_key_values
                          else
                            { migrate_direction: migrate_direction }.merge added_prov_key_values
                          end
  event_note = migrate_direction if event_note.blank?
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  provenance_log_event( attributes: nil,
                        event: event,
                        current_user: current_user,
                        event_note: event_note,
                        ignore_blank_key_values: false,
                        prov_key_values: prov_key_values )
end
-
-
# Log a MINT_DOI provenance event for this object.
def provenance_mint_doi( current_user:, event_note: '' )
  attrs, blanks_flag = attributes_for_provenance_mint_doi
  provenance_log_event( attributes: attrs,
                        current_user: current_user,
                        event: EVENT_MINT_DOI,
                        event_note: event_note,
                        ignore_blank_key_values: blanks_flag )
end
-
-
# Log a PUBLISH provenance event; message is merged into the snapshot
# payload.
def provenance_publish( current_user:, event_note: '', message: '' )
  attributes, ignore_blank_key_values = attributes_for_provenance_publish
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: EVENT_PUBLISH,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              message: message )
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: EVENT_PUBLISH,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Log a TOMBSTONE provenance event, recording the epitaph plus the
# depositor and visibility at the time of tombstoning.
# NOTE(review): the snapshot call hard-codes
# ignore_blank_key_values: false while the log call uses the flag from
# attributes_for_provenance_tombstone — confirm this asymmetry is
# intentional.
def provenance_tombstone( current_user:,
                          event_note: '',
                          epitaph:,
                          depositor_at_tombstone:,
                          visibility_at_tombstone: )

  attributes, ignore_blank_key_values = attributes_for_provenance_tombstone
  event = EVENT_TOMBSTONE
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: false,
                                                              epitaph: epitaph,
                                                              depositor_at_tombstone: depositor_at_tombstone,
                                                              visibility_at_tombstone: visibility_at_tombstone )
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Log an UNEMBARGO provenance event, recording the visibility before
# and after the unembargo.  Reuses the embargo attribute list.
def provenance_unembargo( current_user:, event_note: '', message: '', embargo_visibility:, embargo_visibility_after: )
  attributes, ignore_blank_key_values = attributes_for_provenance_embargo
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: EVENT_UNEMBARGO,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              message: message,
                                                              embargo_visibility: embargo_visibility,
                                                              embargo_visibility_after: embargo_visibility_after )
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: EVENT_UNEMBARGO,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Log an UNPUBLISH provenance event for this object.
def provenance_unpublish( current_user:, event_note: '' )
  attrs, blanks_flag = attributes_for_provenance_unpublish
  provenance_log_event( attributes: attrs,
                        current_user: current_user,
                        event: EVENT_UNPUBLISH,
                        event_note: event_note,
                        ignore_blank_key_values: blanks_flag )
end
-
-
# Log an UPDATE provenance event, merging any extra key/values into
# the snapshot payload.
def provenance_update( current_user:, event_note: '', **added_prov_key_values )
  attributes, ignore_blank_key_values = attributes_for_provenance_update
  event = EVENT_UPDATE
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
# Extract and remove the :embargo entry from the update attributes.
# Returns the embargo key/values, or nil when the hash is blank or has
# no :embargo key.  Mutates update_attr_key_values.
def provenance_update_embargo_key_values( update_attr_key_values: )
  return nil unless update_attr_key_values.present?
  return nil unless update_attr_key_values.key? :embargo
  # Hash#delete returns the removed value.
  update_attr_key_values.delete :embargo
end
-
-
# True when an update-attribute record's old and new values differ.
def update_attribute_changed?( update_attr: )
  update_attr[:old_value] != update_attr[:new_value]
end
-
-
# After-save hook for updates: pull any embargo changes out of the
# submitted attribute changes, normalize the rest via
# ProvenanceHelper, and log UPDATE (plus EMBARGO, when embargo values
# actually changed) provenance events.
def provenance_log_update_after( current_user:, event_note: '', update_attr_key_values: nil )
  embargo_key_values = provenance_update_embargo_key_values( update_attr_key_values: update_attr_key_values )
  update_attr_key_values = ProvenanceHelper.update_attribute_key_values( curation_concern: for_provenance_object,
                                                                         **update_attr_key_values )
  if update_attr_key_values.present? || embargo_key_values.present?
    if embargo_key_values.present?
      # fold changed embargo fields back into the update payload
      embargo_key_values.each_pair do |key, value|
        update_attr_key_values[key] = value if update_attribute_changed?( update_attr: value )
      end
    end
    provenance_update( current_user: current_user, event_note: event_note, **update_attr_key_values )
    provenance_embargo( current_user: current_user,
                        event_note: event_note,
                        embargo_key_values: embargo_key_values ) if embargo_key_values.present?
  end
end
-
-
# Before-save hook for updates: capture the attribute changes implied
# by the submitted form params so they can be diffed and logged after
# the save completes.
def provenance_log_update_before( form_params: )
  update_attr_key_values = ProvenanceHelper.form_params_to_update_attribute_key_values( curation_concern: for_provenance_object,
                                                                                       form_params: form_params )
  update_attr_key_values
end
-
-
1
# Log an EVENT_UPDATE_VERSION provenance entry, recording the prior and new
# revision identifiers / creation dates alongside a snapshot of the
# version-update attribute set.
def provenance_update_version( current_user:,
                               event_note: '',
                               new_create_date:,
                               new_revision_id:,
                               prior_create_date:,
                               prior_revision_id:,
                               revision_id:,
                               **added_prov_key_values )
  event = EVENT_UPDATE_VERSION
  attributes, ignore_blank_key_values = attributes_for_provenance_update_version
  version_key_values = { new_create_date: new_create_date,
                         new_revision_id: new_revision_id,
                         prior_create_date: prior_create_date,
                         prior_revision_id: prior_revision_id,
                         revision_id: revision_id }
  # caller-supplied extras win over the standard version keys
  added_prov_key_values = version_key_values.merge( added_prov_key_values )
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **added_prov_key_values )
  provenance_log_event( attributes: attributes,
                        current_user: current_user,
                        event: event,
                        event_note: event_note,
                        ignore_blank_key_values: ignore_blank_key_values,
                        prov_key_values: prov_key_values )
end
-
-
1
# Log an EVENT_UPLOAD provenance entry using the upload attribute set;
# blank attribute values are omitted from the entry.
def provenance_upload( current_user:, event_note: '' )
  upload_attributes = attributes_for_provenance_upload
  provenance_log_event( attributes: upload_attributes,
                        current_user: current_user,
                        event: EVENT_UPLOAD,
                        event_note: event_note,
                        ignore_blank_key_values: true )
end
-
-
1
# Log an EVENT_VIRUS_SCAN provenance entry. The scan result is recorded as
# a key/value and doubles as the event note when no note is given.
def provenance_virus_scan( current_user: nil,
                           event_note: '',
                           scan_result:,
                           **added_prov_key_values )
  event = EVENT_VIRUS_SCAN
  attributes, ignore_blank_key_values = attributes_for_provenance_virus_scan
  event_note = scan_result if event_note.blank?
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **{ scan_result: scan_result }.merge( added_prov_key_values ) )
  # NOTE: attributes / ignore_blank_key_values govern only the snapshot above;
  # the log call itself passes attributes: nil and ignore_blank_key_values: false
  # (same pattern as provenance_workflow).
  provenance_log_event( attributes: nil,
                        event: event,
                        current_user: current_user,
                        event_note: event_note,
                        ignore_blank_key_values: false,
                        prov_key_values: prov_key_values )
end
-
-
1
# Log an EVENT_WORKFLOW provenance entry recording a workflow transition
# (name, prior state, new state). The new state doubles as the event note
# when none is supplied.
def provenance_workflow( current_user: nil,
                         event_note: '',
                         workflow_name:,
                         workflow_state_prior:,
                         workflow_state:,
                         **added_prov_key_values )
  event = EVENT_WORKFLOW
  attributes, ignore_blank_key_values = attributes_for_provenance_workflow
  event_note = workflow_state if event_note.blank?
  workflow_key_values = { workflow_name: workflow_name,
                          workflow_state_prior: workflow_state_prior,
                          workflow_state: workflow_state }
  prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                              current_user: current_user,
                                                              event: event,
                                                              event_note: event_note,
                                                              ignore_blank_key_values: ignore_blank_key_values,
                                                              **workflow_key_values.merge( added_prov_key_values ) )
  # NOTE: attributes / ignore_blank_key_values govern only the snapshot above;
  # the log call passes attributes: nil and ignore_blank_key_values: false.
  provenance_log_event( attributes: nil,
                        event: event,
                        current_user: current_user,
                        event_note: event_note,
                        ignore_blank_key_values: false,
                        prov_key_values: prov_key_values )
end
-
-
1
protected
-
-
1
# Write one provenance log entry for +event+ on the current concern.
# When +prov_key_values+ is not supplied (or is blank), a snapshot of
# +attributes+ is taken first. Also records the event in the per-id
# event cache before logging.
def provenance_log_event( attributes:,
                          current_user:,
                          event:,
                          event_note:,
                          ignore_blank_key_values:,
                          id: for_provenance_id,
                          prov_key_values: nil )
  if prov_key_values.blank?
    prov_key_values = provenance_attribute_values_for_snapshot( attributes: attributes,
                                                                current_user: current_user,
                                                                event: event,
                                                                event_note: event_note,
                                                                ignore_blank_key_values: ignore_blank_key_values )
  end
  for_provenance_event_cache_write( event: event, id: id )
  ProvenanceHelper.log( class_name: for_provenance_class.name,
                        id: id,
                        event: event,
                        event_note: event_note,
                        **prov_key_values )
end
-
-
end
-
-
end
-
-
1
module ActiveFedora

  # Prepend-style extension point for ActiveFedora persistence.
  module PersistenceExt

    # Hook fired when this module is prepended to a class: also prepends
    # ClassMethods onto the class's singleton class, so class-level methods
    # (e.g. .update) can be overridden ahead of the originals.
    def self.prepended( base )
      base.singleton_class.prepend( ClassMethods )
    end

    # Class-level overrides. Currently empty — an earlier provenance-logging
    # override of .update was removed (it logged provenance attribute values
    # around super when the class responded to the provenance hooks).
    module ClassMethods
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Deepblue

  # Workflow lifecycle hooks: each workflow_* method logs a debug trace,
  # records provenance, and (where applicable) sends RDS notification email.
  module WorkflowEventBehavior

    # Create event: provenance + RDS email, then enqueue a JIRA ticket job.
    def workflow_create( current_user:, event_note: "" )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "event_note=#{event_note}",
                                             "" ]
      provenance_create( current_user: current_user, event_note: event_note )
      email_rds_create( current_user: current_user, event_note: event_note )
      JiraNewTicketJob.perform_later( work_id: id, current_user: current_user )
    end

    # Embargo event: provenance only (no email).
    def workflow_embargo( current_user:, event_note: "" )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "event_note=#{event_note}",
                                             "" ]
      provenance_embargo( current_user: current_user, event_note: event_note )
    end

    # Destroy event: provenance + RDS email.
    def workflow_destroy( current_user:, event_note: "" )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "event_note=#{event_note}",
                                             "" ]
      provenance_destroy( current_user: current_user, event_note: event_note )
      email_rds_destroy( current_user: current_user, event_note: event_note )
    end

    # Publish event: stamp publication dates (when supported), save, then
    # record provenance and send RDS email.
    def workflow_publish( current_user:, event_note: "", message: "" )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "event_note=#{event_note}",
                                             "message=#{message}",
                                             "" ]
      if respond_to? :date_published
        self.date_published = Hyrax::TimeService.time_in_utc
        # NOTE(review): date_published uses Hyrax::TimeService.time_in_utc while
        # date_modified uses DateTime.now — confirm the mixed time sources are intended.
        self.date_modified = DateTime.now
        self.save!
      else
        ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                               Deepblue::LoggingHelper.called_from,
                                               "does not respond to :date_published",
                                               "" ]
      end
      provenance_publish( current_user: current_user, event_note: event_note, message: message )
      email_rds_publish( current_user: current_user, event_note: event_note, message: message )
    end

    # Unembargo event.
    # NOTE(review): this calls provenance_embargo (same as workflow_embargo),
    # not a provenance_unembargo — confirm this is the intended provenance event.
    def workflow_unembargo( current_user:, event_note: "" )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "event_note=#{event_note}",
                                             "" ]
      provenance_embargo( current_user: current_user, event_note: event_note )
    end

    # Unpublish event: provenance + RDS email.
    def workflow_unpublish( current_user:, event_note: "" )
      ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
                                             Deepblue::LoggingHelper.called_from,
                                             Deepblue::LoggingHelper.obj_class( 'class', self ),
                                             "current_user=#{current_user}",
                                             "event_note=#{event_note}",
                                             "" ]
      provenance_unpublish( current_user: current_user, event_note: event_note )
      email_rds_unpublish( current_user: current_user, event_note: event_note )
    end

    # Pre-update hook; intentionally a no-op here (overridable).
    def workflow_update_before( current_user:, event_note: "" )
    end

    # Post-update hook; intentionally a no-op here (overridable).
    def workflow_update_after( current_user:, event_note: "" )
    end

  end

end
-
# frozen_string_literal: true
-
-
1
require File.join( Gem::Specification.find_by_name("hyrax").full_gem_path, "app/models/concerns/hyrax/collection_behavior.rb" )
-
-
1
module Hyrax

  # Monkey patch of Hyrax::CollectionBehavior.
  # Overrides the methods that did not pass an explicit row count to
  # search_with_conditions; the default row count leads to errors for
  # works with large numbers of files.
  module CollectionBehavior
    include ::Deepblue::WorkflowEventBehavior

    # Compute the total size of every file in the collection using Solr,
    # avoiding Fedora access.
    #
    # @return [Integer] size of collection in bytes
    # @raise [RuntimeError] unsaved record does not exist in solr
    def bytes
      # Hoist the member-id lookup: the original queried Solr twice
      # (once for empty?, once for the collect).
      ids = member_object_ids
      return 0 if ids.empty?
      raise "Collection must be saved to query for bytes" if new_record?
      # One query per member id because Solr is not a relational database.
      ids.sum { |work_id| size_for_work( work_id ) }
    end

    # IDs of the member objects (the containment association is flipped).
    #
    # @return [Array<String>] empty when the collection has no id yet
    def member_object_ids
      return [] unless id
      ActiveFedora::Base.search_with_conditions( "member_of_collection_ids_ssim:#{id}", rows: 1000 ).map(&:id)
    end

    # Calculate the size of all the files in a work.
    #
    # @param work_id [String] identifier for a work
    # @return [Integer] the size in bytes
    def size_for_work( work_id )
      argz = { fl: "id, #{file_size_field}",
               fq: "{!join from=#{member_ids_field} to=id}id:#{work_id}",
               rows: 10_000 }
      files = ::FileSet.search_with_conditions( {}, argz )
      files.reduce( 0 ) { |sum, f| sum + f[file_size_field].to_i }
    end

    # Solr field name holding each file's size.
    # Override for your own installation if using something different.
    def file_size_field
      Solrizer.solr_name( :file_size, Hyrax::FileSetIndexer::STORED_LONG )
    end

  end

end
-
1
module Hyrax
  # Stores a file uploaded by a user. Eventually these files get attached
  # to FileSets and pushed into Fedora.
  class UploadedFile < ActiveRecord::Base
    self.table_name = 'uploaded_files'

    # CarrierWave-managed file attachment, also reachable as #uploader.
    mount_uploader :file, UploadedFileUploader
    alias uploader file

    has_many :job_io_wrappers,
             inverse_of: 'uploaded_file',
             class_name: 'JobIoWrapper',
             dependent: :destroy
    belongs_to :user, class_name: '::User'

    # Remove the stored file from disk before the database row goes away.
    before_destroy :remove_file!
  end
end
-
# frozen_string_literal: true
-
-
1
module Umrdr

  # Accessors for DBD-specific fields stored on a Solr document.
  # Multi-valued fields are read with #fetch and an empty-array default;
  # single-valued fields take the first stored value.
  module SolrDocumentBehavior
    extend ActiveSupport::Concern

    def access_deepblue
      fetch(Solrizer.solr_name('access_deepblue'), [])
    end

    def authoremail
      Array(self[Solrizer.solr_name('authoremail')]).first
    end

    def curation_notes_admin
      fetch(Solrizer.solr_name('curation_notes_admin'), [])
    end

    def curation_notes_user
      fetch(Solrizer.solr_name('curation_notes_user'), [])
    end

    def date_coverage
      Array(self[Solrizer.solr_name('date_coverage')]).first
    end

    def date_published
      date_published2
    end

    # Reads the date-published field directly from its stored Solr key.
    def date_published2
      self[ 'date_published_dtsim' ]
    end

    ## begin DOI methods

    def doi
      doi_the_correct_one
    end

    # DOI stored under the :symbol variant of the field name.
    def doi_the_correct_one
      self[ Solrizer.solr_name( 'doi', :symbol ) ]
    end

    # The first time this is called the doi may not yet be in solr,
    # hence the rescue returning nil.
    def doi_minted?
      doi_the_correct_one.present?
    rescue
      nil
    end

    def doi_minting_enabled?
      ::Deepblue::DoiBehavior::DOI_MINTING_ENABLED
    end

    def doi_pending?
      doi_the_correct_one == ::Deepblue::DoiBehavior::DOI_PENDING
    end

    ## end DOI methods

    # Read from 'file_size_lts' directly: the standard
    # Solrizer.solr_name('file_size') lookup produces 'file_size_tesim'.
    def file_size
      Array(self['file_size_lts']).first
    end

    def file_size_human_readable
      ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( file_size, precision: 3 )
    end

    def fundedby
      fetch(Solrizer.solr_name('fundedby'), [])
    end

    def fundedby_other
      Array(self[Solrizer.solr_name('fundedby_other')]).first
    end

    def grantnumber
      Array(self[Solrizer.solr_name('grantnumber')]).first
    end

    def methodology
      Array(self[Solrizer.solr_name('methodology')]).first
    end

    def original_checksum
      Array(self[Solrizer.solr_name('original_checksum')]).first
    end

    def referenced_by
      fetch(Solrizer.solr_name('referenced_by'), [])
    end

    def rights_license_other
      Array(self[Solrizer.solr_name('rights_license_other')]).first
    end

    def subject_discipline
      fetch(Solrizer.solr_name('subject_discipline'), [])
    end

    def tombstone
      Array(self[Solrizer.solr_name('tombstone')]).first
    end

    # Read from 'total_file_size_lts' directly: the standard
    # Solrizer.solr_name('total_file_size') lookup produces a *_tesim field.
    def total_file_size
      Array(self['total_file_size_lts']).first
    end

    def total_file_size_human_readable
      ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( total_file_size, precision: 3 )
    end

    def virus_scan_service
      Array(self[Solrizer.solr_name('virus_scan_service')]).first
    end

    def virus_scan_status
      Array(self[Solrizer.solr_name('virus_scan_status')]).first
    end

    def virus_scan_status_date
      Array(self[Solrizer.solr_name('virus_scan_status_date')]).first
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Umrdr
  # Bookkeeping for a work's total_file_size attribute, backed by a Solr
  # query over the work's attached files.
  module UmrdrWorkBehavior
    extend ActiveSupport::Concern

    # Calculate the size of all the files in the work via Solr.
    # @return [Integer] the size in bytes
    def size_of_work
      file_size_field = Solrizer.solr_name(:file_size, Hyrax::FileSetIndexer::STORED_LONG)
      member_ids_field = Solrizer.solr_name('member_ids', :symbol)
      argz = { fl: "id, #{file_size_field}",
               fq: "{!join from=#{member_ids_field} to=id}id:#{id}",
               rows: 10_000 }
      files = ::FileSet.search_with_conditions({}, argz)
      files.reduce(0) { |sum, f| sum + f[file_size_field].to_i }
    end

    # Recompute the total after a file set is added (no save).
    def total_file_size_add_file_set( _file_set )
      update_total_file_size
    end

    # Recompute the total after a file set is added, then save.
    def total_file_size_add_file_set!( _file_set )
      update_total_file_size!
    end

    def total_file_size_human_readable
      ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( total_file_size, precision: 3 )
    end

    # Recompute the total after a file set is removed (no save).
    def total_file_size_subtract_file_set( _file_set )
      update_total_file_size
    end

    # Recompute the total after a file set is removed, then save.
    def total_file_size_subtract_file_set!( _file_set )
      update_total_file_size!
    end

    # Set total_file_size from a fresh Solr computation (no save).
    def update_total_file_size
      self.total_file_size = size_of_work
    end

    def update_total_file_size!
      update_total_file_size
      save!
    end

    private

    def file_size_from_file_set( file_set )
      return 0 if file_set.nil?
      file_set.file_size_value
    end

    # Add +file_size+ to the running total, clamping at zero.
    def total_file_size_add( file_size )
      current_total_size = total_file_size
      current_total_size = ( current_total_size.nil? ? 0 : current_total_size ) + file_size
      current_total_size = 0 if current_total_size.negative?
      self.total_file_size = current_total_size
    end

    # Saving variant: a single file set resets the total outright.
    def total_file_size_add!( file_size )
      if 1 == file_sets.size
        total_file_size_set file_size
        save!
      elsif 0 != file_size
        total_file_size_add file_size
        save!
      end
    end

    def total_file_size_set( file_size )
      self.total_file_size = file_size
    end

  end
end
-
# frozen_string_literal: true
-
-
1
module Umrdr
-
1
module UmrdrWorkMetadata
-
1
extend ActiveSupport::Concern
-
-
1
included do
-
-
2
property :access_deepblue, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#access_deepblue'), multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :access_deepblue_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#access_deepblue_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :authoremail, predicate: ::RDF::Vocab::FOAF.mbox, multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :creator_ordered, predicate: ::RDF::Vocab::MODS.name, multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :curation_notes_admin, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_admin'), multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :curation_notes_admin_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_admin_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :curation_notes_user, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_user'), multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :curation_notes_user_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#curation_notes_user_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :date_coverage, predicate: ::RDF::Vocab::DC.temporal, multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable, :facetable
-
end
-
-
2
property :date_published, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#date_published'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable, :facetable
-
end
-
-
2
property :description_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#description_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :doi, predicate: ::RDF::Vocab::Identifiers.doi, multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :fundedby, predicate: ::RDF::Vocab::DISCO.fundedBy, multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :fundedby_other, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#fundedby_other'), multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :grantnumber, predicate: ::RDF::URI.new('http://purl.org/cerif/frapo/hasGrantNumber'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :hdl, predicate: ::RDF::Vocab::Identifiers.hdl, multiple: false
-
-
2
property :referenced_by, predicate: ::RDF::Vocab::DC.isReferencedBy, multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :referenced_by_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#referenced_by_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :rights_license_other, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#rights_license_other'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :keyword_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#keyword_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :language_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#language_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :methodology, predicate: ::RDF::URI.new('http://www.ddialliance.org/Specification/DDI-Lifecycle/3.2/XMLSchema/FieldLevelDocumentation/schemas/datacollection_xsd/elements/DataCollectionMethodology.html'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :subject_discipline, predicate: ::RDF::Vocab::MODS.subject, multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable, :facetable
-
end
-
-
2
property :title_ordered, predicate: ::RDF::URI.new('https://deepblue.lib.umich.edu/data/help.help#title_ordered'), multiple: false do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :tombstone, predicate: ::RDF::Vocab::DC.provenance, multiple: true do |index|
-
4
index.type :text
-
4
index.as :stored_searchable
-
end
-
-
2
property :total_file_size, predicate: ::RDF::Vocab::DC.SizeOrDuration, multiple: false
-
-
# TODO: can't use the same predicate twice
-
# property :total_file_size_human_readable, predicate: ::RDF::Vocab::DC.SizeOrDuration, multiple: false
-
-
end
-
-
end
-
end
-
# frozen_string_literal: true
-
-
1
class DataSet < ActiveFedora::Base
-
-
1
include ::Hyrax::WorkBehavior

self.indexer = DataSetIndexer
# Change this to restrict which works can be added as a child.
# self.valid_child_concerns = []

include Umrdr::UmrdrWorkBehavior
include Umrdr::UmrdrWorkMetadata
include ::Deepblue::EmbargoableBehavior

validates :authoremail, presence: { message: 'You must have author contact information.' }
validates :creator, presence: { message: 'Your work must have a creator.' }
validates :description, presence: { message: 'Your work must have a description.' }
validates :methodology, presence: { message: 'Your work must have a description of the method for collecting the dataset.' }
validates :rights_license, presence: { message: 'You must select a license for your work.' }
validates :title, presence: { message: 'Your work must have a title.' }

# DefaultMetadata must be included after the other metadata modules because
# it finalizes the metadata schema (by adding accepts_nested_attributes).
include ::Deepblue::DefaultMetadata

include ::Deepblue::MetadataBehavior
include ::Deepblue::EmailBehavior
include ::Deepblue::ProvenanceBehavior
include ::Deepblue::DoiBehavior
include ::Deepblue::WorkflowEventBehavior

after_initialize :set_defaults

before_destroy :provenance_before_destroy_data_set
-
-
1
# before_destroy hook; intentionally a no-op — workflow_destroy already
# records the destroy provenance.
def provenance_before_destroy_data_set
end

# after_initialize hook: new records default to the "Dataset" resource type.
def set_defaults
  self.resource_type = ["Dataset"] if new_record?
end
-
-
1
# Full attribute-key set used for complete metadata snapshots.
def metadata_keys_all
  %i[
    access_deepblue admin_set_id authoremail creator
    curation_notes_admin curation_notes_user date_coverage date_created
    date_modified date_published date_updated depositor description doi
    file_set_ids fundedby fundedby_other grantnumber keyword language
    location methodology prior_identifier referenced_by rights_license
    rights_license_other subject_discipline title tombstone
    total_file_count total_file_size total_file_size_human_readable
    visibility
  ]
end

# Attribute keys for standard notification email bodies:
# title, creator, contact info, discipline, record URL.
def metadata_keys_email_standard
  %i[ title creator depositor authoremail description subject_discipline location ]
end

# Minimal attribute-key set for brief snapshots.
def metadata_keys_brief
  %i[ authoremail title visibility ]
end

# Attribute keys included in metadata reports.
def metadata_keys_report
  %i[
    access_deepblue authoremail creator curation_notes_user date_coverage
    date_published depositor description doi fundedby fundedby_other
    grantnumber keyword language methodology referenced_by rights_license
    rights_license_other subject_discipline title total_file_count
    total_file_size_human_readable
  ]
end

# Attribute keys recorded for update events.
def metadata_keys_update
  %i[ authoremail title visibility ]
end
-
-
1
# Attribute-set selectors: map the email/provenance event categories onto
# the metadata key lists above.

def attributes_all_for_email
  metadata_keys_all
end

def attributes_all_for_provenance
  metadata_keys_all
end

def attributes_brief_for_email
  metadata_keys_brief
end

def attributes_brief_for_provenance
  metadata_keys_brief
end

# RDS create emails use a fixed attribute list and keep blank values.
def attributes_for_email_rds_create
  return %i[ title location creator depositor authoremail subject_discipline id type ],
         Deepblue::AbstractEventBehavior::USE_BLANK_KEY_VALUES
end

def attributes_standard_for_email
  metadata_keys_email_standard
end

def attributes_update_for_email
  metadata_keys_update
end

def attributes_update_for_provenance
  metadata_keys_update
end

# Route helpers: email and provenance events both link to the work's page.

def for_email_route
  for_event_route
end

def for_event_route
  Rails.application.routes.url_helpers.hyrax_data_set_path( id: self.id ) # rubocop:disable Style/RedundantSelf
end

def for_provenance_route
  for_event_route
end

def human_readable_type
  'Work'
end

def title_type
  'Data Set'
end
-
-
1
# Resolve DataSet-specific attributes for email snapshots. Handled
# attributes are written into +email_key_values+ (blank values skipped when
# +ignore_blank_key_values+); unhandled attributes return false so the
# caller can fall back to the default mapping.
def map_email_attributes_override!( event:, # rubocop:disable Lint/UnusedMethodArgument
                                    attribute:,
                                    ignore_blank_key_values:,
                                    email_key_values: )
  value = nil
  handled = true
  case attribute.to_s
  when 'data_set_url', 'location'
    value = data_set_url
  when 'file_set_ids'
    value = file_set_ids
  when 'total_file_count'
    value = total_file_count
  when 'total_file_size_human_readable'
    value = total_file_size_human_readable
  when 'visibility'
    value = visibility
  when 'work_or_collection', 'type'
    value = "Work"
  else
    handled = false
  end
  return false unless handled
  email_key_values[attribute] = value unless ignore_blank_key_values && value.blank?
  true
end
-
-
1
# URL of this work's public page; on any failure the error is logged and its
# string form is returned (deliberately broad rescue — kept as best-effort).
def data_set_url
  Deepblue::EmailHelper.data_set_url( data_set: self )
rescue Exception => e # rubocop:disable Lint/RescueException
  Rails.logger.error "#{e.class} #{e.message} at #{e.backtrace[0]}"
  e.to_s
end

# Current Sipity workflow state name for this work, or nil when no
# workflow entity exists for its global id.
def workflow_state
  entity = Sipity::Entity.where( proxy_for_global_id: to_global_id.to_s )&.first
  entity&.workflow_state_name
end
-
-
1
# Provenance analog of map_email_attributes_override!: resolves
# DataSet-specific attributes into +prov_key_values+. Returns false for
# attributes not handled here so the caller can use the default mapping.
def map_provenance_attributes_override!( event:, # rubocop:disable Lint/UnusedMethodArgument
                                         attribute:,
                                         ignore_blank_key_values:,
                                         prov_key_values: )
  value = nil
  handled = true
  case attribute.to_s
  when 'file_set_ids'
    value = file_set_ids
  when 'total_file_count'
    value = total_file_count
  when 'total_file_size_human_readable'
    value = total_file_size_human_readable
  when 'visibility'
    value = visibility
  else
    handled = false
  end
  return false unless handled
  prov_key_values[attribute] = value unless ignore_blank_key_values && value.blank?
  true
end
-
-
1
# Resolve DataSet-specific keys when building a metadata hash. Handled keys
# are written into +key_values+ (blank values skipped when
# +ignore_blank_values+); returns false for keys not handled here.
def metadata_hash_override( key:, ignore_blank_values:, key_values: )
  value = nil
  handled = true
  case key.to_s
  when 'file_set_ids'
    value = file_set_ids
  when 'total_file_count'
    value = total_file_count
  when 'total_file_size'
    value = total_file_size
  when 'total_file_size_human_readable'
    value = total_file_size_human_readable
  else
    handled = false
  end
  return false unless handled
  key_values[key] = value unless ignore_blank_values && value.blank?
  true
end
-
-
1
# Objects listed in the metadata report: the work's file sets.
def metadata_report_contained_objects
  file_sets
end

# Report key configuration: the blank-value policy and the key list.
def metadata_report_keys
  return IGNORE_BLANK_KEY_VALUES, metadata_keys_report
end

# Custom report labels for derived keys; nil falls back to the default label.
def metadata_report_label_override( metadata_key:, metadata_value: ) # rubocop:disable Lint/UnusedMethodArgument
  case metadata_key.to_s
  when 'file_set_ids'
    'File Set IDs: '
  when 'total_file_count'
    'Total File Count: '
  when 'total_file_size_human_readable'
    'Total File Size: '
  end
end

# Prefix used before the title in report headers.
def metadata_report_title_pre
  'DataSet: '
end
-
-
# # Visibility helpers
-
# def private?
-
# visibility == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
-
# end
-
#
-
# def public?
-
# visibility == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
-
# end
-
-
#
-
# Make it so work does not show up in search result for anyone, not even admins.
-
#
-
1
# Tombstone the work: make it (and its file sets) private so it no longer
# appears in search results for anyone, not even admins, then record the
# tombstone provenance with the pre-tombstone depositor and visibility.
#
# @return [Boolean] false when already tombstoned, true on success
def entomb!( epitaph, current_user )
  return false if tombstone.present?
  depositor_at_tombstone = depositor
  visibility_at_tombstone = visibility
  self.visibility = Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
  # NOTE(review): self-assignment — this writes the current depositor back to
  # itself (a no-op). Confirm whether a tombstone depositor value was intended.
  self.depositor = depositor
  self.tombstone = [epitaph]
  file_sets.each do |file_set|
    # TODO: FileSet#entomb!
    file_set.visibility = Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
  end
  save
  provenance_tombstone( current_user: current_user,
                        epitaph: epitaph,
                        depositor_at_tombstone: depositor_at_tombstone,
                        visibility_at_tombstone: visibility_at_tombstone )
  true
end
-
-
# begin metadata
-
-
# the list of creators is ordered
-
1
def creator
-
347
values = super
-
347
values = Deepblue::MetadataHelper.ordered( ordered_values: creator_ordered, values: values )
-
347
return values
-
end
-
-
1
def creator=( values )
-
27
self.creator_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: creator_ordered, values: values )
-
27
super values
-
end
-
-
# the list of curation_notes_admin is ordered
-
1
# Getter: curation_notes_admin values re-sequenced by the persisted
# curation_notes_admin_ordered ordering.
def curation_notes_admin
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: curation_notes_admin_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def curation_notes_admin=( values )
  self.curation_notes_admin_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: curation_notes_admin_ordered, values: values )
  super values
end
-
-
# the list of curation_notes_user is ordered
-
1
# Getter: curation_notes_user values re-sequenced by the persisted
# curation_notes_user_ordered ordering.
def curation_notes_user
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: curation_notes_user_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def curation_notes_user=( values )
  self.curation_notes_user_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: curation_notes_user_ordered, values: values )
  super values
end
-
-
# the list of description is ordered
-
1
# Getter: description values re-sequenced by the persisted
# description_ordered ordering.
def description
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: description_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def description=( values )
  self.description_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: description_ordered, values: values )
  super values
end
-
-
#
-
# the list of keyword is ordered
-
#
-
1
# Getter: keyword values re-sequenced by the persisted keyword_ordered
# ordering.
def keyword
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: keyword_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def keyword=( values )
  self.keyword_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: keyword_ordered, values: values )
  super values
end
-
-
#
-
# handle the list of language as ordered
-
#
-
1
# Getter: language values re-sequenced by the persisted language_ordered
# ordering.
def language
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: language_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def language=( values )
  self.language_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: language_ordered, values: values )
  super values
end
-
-
#
-
# handle the list of referenced_by as ordered
-
#
-
1
# Getter: referenced_by values re-sequenced by the persisted
# referenced_by_ordered ordering.
def referenced_by
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: referenced_by_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def referenced_by=( values )
  self.referenced_by_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: referenced_by_ordered, values: values )
  super values
end
-
-
1
# Getter: access_deepblue values re-sequenced by the persisted
# access_deepblue_ordered ordering.
def access_deepblue
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: access_deepblue_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def access_deepblue=( values )
  self.access_deepblue_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: access_deepblue_ordered, values: values )
  super values
end
-
-
# the list of title is ordered
-
1
# Getter: title values re-sequenced by the persisted title_ordered ordering.
def title
  values = super
  values = Deepblue::MetadataHelper.ordered( ordered_values: title_ordered, values: values )
  return values
end

# Setter: persists the ordering, then stores the values normally.
def title=( values )
  self.title_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: title_ordered, values: values )
  super values
end
-
-
# end metadata
-
-
1
# Number of member file sets; 0 when there are none.
def total_file_count
  return 0 if file_set_ids.blank?
  file_set_ids.size
end
-
-
end
-
# Generated via
-
# `rails generate hyrax:work Dissertation`
-
1
# Dissertation work type (Hyrax-generated), indexed by DissertationIndexer
# and carrying the Deepblue default metadata schema.
class Dissertation < ActiveFedora::Base
  include ::Hyrax::WorkBehavior

  self.indexer = DissertationIndexer
  # Change this to restrict which works can be added as a child.
  # self.valid_child_concerns = []
  validates :title, presence: { message: 'Your work must have a title.' }

  # This must be included at the end, because it finalizes the metadata
  # schema (by adding accepts_nested_attributes)
  #include ::Hyrax::BasicMetadata
  include ::Deepblue::DefaultMetadata
end
-
# frozen_string_literal: true
-
-
1
# A FileSet wraps a single deposited file plus its versions/derivatives.
# Deepblue mixins add ordered metadata, provenance logging, and embargo
# behavior; Datacore::PresentsArchiveFile adds SDA archive-file support.
class FileSet < ActiveFedora::Base

  include ::Deepblue::FileSetMetadata # must be before `include ::Hyrax::FileSetBehavior`
  include ::Hyrax::FileSetBehavior
  include ::Deepblue::FileSetBehavior
  include ::Deepblue::MetadataBehavior
  include ::Deepblue::ProvenanceBehavior
  include ::Deepblue::EmbargoableBehavior
  include ::Datacore::PresentsArchiveFile

  before_destroy :provenance_before_destroy_file_set

  # before_destroy hook; intentionally empty because workflow_destroy
  # already records the provenance destroy event (see comment below).
  def provenance_before_destroy_file_set
    # workflow_destroy does this
    # provenance_destroy( current_user: '' ) # , event_note: 'provenance_before_destroy_file_set' )
  end

  # Full set of metadata keys used for provenance/email reporting.
  def metadata_keys_all
    %i[
      curation_notes_admin
      curation_notes_user
      date_created
      date_modified
      date_uploaded
      file_extension
      files_count
      file_size
      file_size_human_readable
      label
      location
      mime_type
      original_checksum
      original_name
      parent_id
      prior_identifier
      title
      uri
      version_count
      virus_scan_service
      virus_scan_status
      virus_scan_status_date
      visibility
    ]
  end

  # Minimal key set for brief reports.
  def metadata_keys_brief
    %i[
      title
      label
      parent_id
      file_extension
      visibility
    ]
  end

  # Key set used by metadata report generation (see metadata_report_keys).
  def metadata_keys_report
    %i[
      curation_notes_user
      file_extension
      files_count
      file_size_human_readable
      label
      mime_type
      original_checksum
      original_name
      parent_id
      title
    ]
  end

  # Key set reported for update events.
  def metadata_keys_update
    %i[
      title
      label
      parent_id
      file_extension
      version_count
      visibility
    ]
  end

  # Key set reported for virus-scan events.
  def metadata_keys_virus
    %i[
      title
      label
      parent_id
      file_extension
      virus_scan_service
      virus_scan_status
      virus_scan_status_date
      visibility
    ]
  end

  # The attributes_* helpers below map event contexts to the key sets above.

  def attributes_all_for_email
    metadata_keys_all
  end

  def attributes_all_for_provenance
    metadata_keys_all
  end

  def attributes_brief_for_email
    metadata_keys_brief
  end

  def attributes_brief_for_provenance
    metadata_keys_brief
  end

  def attributes_standard_for_email
    metadata_keys_brief
  end

  def attributes_update_for_email
    metadata_keys_update
  end

  def attributes_update_for_provenance
    metadata_keys_update
  end

  def attributes_virus_for_provenance
    metadata_keys_virus
  end

  # First attached file that has an original_name; nil when there is none.
  def files_to_file
    return nil if files.blank?
    files.each do |f|
      return f if f.original_name.present?
    end
    nil
  end

  # Route recorded in provenance entries for this file set.
  def for_provenance_route
    Rails.application.routes.url_helpers.hyrax_file_set_path( id: id )
  end

  def title_type
    human_readable_type
  end

  # Computes derived/virtual attribute values for provenance logging.
  # Populates prov_key_values and returns true for attributes handled here;
  # returns false so the caller falls back to its default mapping otherwise.
  def map_provenance_attributes_override!( event:, # rubocop:disable Lint/UnusedMethodArgument
                                           attribute:,
                                           ignore_blank_key_values:,
                                           prov_key_values: )
    value = nil
    handled = case attribute.to_s
              when 'file_extension'
                value = File.extname label if label.present?
                true
              when 'files_count'
                value = files.size
                true
              when 'file_size'
                value = file_size_value
                true
              when 'file_size_human_readable'
                value = file_size_value
                value = ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value, precision: 3 )
                true
              when 'label'
                value = label
                true
              when 'mime_type'
                value = mime_type
                true
              when 'parent_id'
                value = parent.id unless parent.nil?
                true
              when 'original_checksum'
                value = original_checksum.blank? ? '' : original_checksum[0]
                true
              when 'original_name'
                value = original_name_value
                true
              when 'uri'
                value = uri.value
                true
              when 'version_count'
                value = version_count
                true
              when 'visibility'
                value = visibility
                true
              else
                false
              end
    return false unless handled
    if ignore_blank_key_values
      prov_key_values[attribute] = value if value.present?
    else
      prov_key_values[attribute] = value
    end
    return true
  end

  # Size in bytes: prefers the stored file_size property; falls back to the
  # original file's size, or 0 when neither is available.
  def file_size_value
    if file_size.blank?
      original_file.nil? ? 0 : original_file.size
    else
      file_size[0]
    end
  end

  # Original upload name, or '' when unavailable.
  def original_name_value
    return '' if original_file.nil?
    return original_file.original_name if original_file.respond_to?( :original_name )
    return ''
  end

  # Same derived-value mapping as map_provenance_attributes_override!, but
  # used when building a generic metadata hash keyed by `key`.
  def metadata_hash_override( key:, ignore_blank_values:, key_values: )
    value = nil
    handled = case key.to_s
              when 'file_extension'
                value = File.extname label if label.present?
                true
              when 'files_count'
                value = files.size
                true
              when 'file_size'
                value = file_size_value
                true
              when 'file_size_human_readable'
                value = file_size_value
                value = ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value, precision: 3 )
                true
              when 'label'
                value = label
                true
              when 'mime_type'
                value = mime_type
                true
              when 'parent_id'
                value = parent.id unless parent.nil?
                true
              when 'original_checksum'
                value = original_checksum.blank? ? '' : original_checksum[0]
                true
              when 'original_name'
                value = original_name_value
                true
              when 'uri'
                value = uri.value
                true
              when 'version_count'
                value = version_count
                true
              when 'visibility'
                value = visibility
                true
              else
                false
              end
    return false unless handled
    if ignore_blank_values
      key_values[key] = value if value.present?
    else
      key_values[key] = value
    end
    return true
  end

  # Returns a pair: the ignore-blank-values flag and the report key list.
  # NOTE(review): IGNORE_BLANK_KEY_VALUES is defined outside this view —
  # presumably in a Deepblue metadata mixin; confirm.
  def metadata_report_keys
    return IGNORE_BLANK_KEY_VALUES, metadata_keys_report
  end

  # Prefix prepended to the title line of this file set's metadata report.
  def metadata_report_title_pre
    'FileSet: '
  end

  # begin metadata

  # the list of curation_notes_admin is ordered
  def curation_notes_admin
    values = super
    values = Deepblue::MetadataHelper.ordered( ordered_values: curation_notes_admin_ordered, values: values )
    return values
  end

  # Setter: persists the ordering, then stores the values normally.
  def curation_notes_admin=( values )
    self.curation_notes_admin_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: curation_notes_admin_ordered, values: values )
    super values
  end

  # the list of curation_notes_user is ordered
  def curation_notes_user
    values = super
    values = Deepblue::MetadataHelper.ordered( ordered_values: curation_notes_user_ordered, values: values )
    return values
  end

  # Setter: persists the ordering, then stores the values normally.
  def curation_notes_user=( values )
    self.curation_notes_user_ordered = Deepblue::MetadataHelper.ordered_values( ordered_values: curation_notes_user_ordered, values: values )
    super values
  end

  # end metadata

end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
# GenericWork work type (Hyrax-generated), indexed by GenericWorkIndexer
# and carrying the Deepblue default metadata schema.
class GenericWork < ActiveFedora::Base
  include ::Hyrax::WorkBehavior

  self.indexer = GenericWorkIndexer
  # Change this to restrict which works can be added as a child.
  # self.valid_child_concerns = []
  validates :title, presence: { message: 'Your work must have a title.' }

  # This must be included at the end, because it finalizes the metadata
  # schema (by adding accepts_nested_attributes)
  #include ::Hyrax::BasicMetadata
  include ::Deepblue::DefaultMetadata
end
-
1
# ActiveRecord model backing Questioning Authority local vocabularies.
class Qa::LocalAuthority < ApplicationRecord
end
-
1
# A single term within a Qa::LocalAuthority vocabulary.
class Qa::LocalAuthorityEntry < ApplicationRecord
  belongs_to :local_authority
end
-
# frozen_string_literal: true
-
1
# Blacklight search builder with Hydra access-control enforcement and
# Hyrax search filters applied to every query.
class SearchBuilder < Blacklight::SearchBuilder
  include Blacklight::Solr::SearchBuilderBehavior
  # Add a filter query to restrict the search to documents the current user has access to
  include Hydra::AccessControlsEnforcement
  include Hyrax::SearchFilters

  ##
  # @example Adding a new step to the processor chain
  #   self.default_processor_chain += [:add_custom_data_to_query]
  #
  #   def add_custom_data_to_query(solr_parameters)
  #     solr_parameters[:custom] = blacklight_params[:user_value]
  #   end
end
-
# frozen_string_literal: true
-
-
1
# SolrDocument for this repository: Blacklight + Hyrax document behavior,
# generated stored-field readers via solrized_methods, Dublin Core field
# semantics, and OAI-PMH helper accessors.
class SolrDocument

  include Blacklight::Solr::Document
  # include BlacklightOaiProvider::SolrDocumentBehavior

  include Blacklight::Gallery::OpenseadragonSolrDocument

  # Adds Hyrax behaviors to the SolrDocument.
  include Hyrax::SolrDocumentBehavior
  include Umrdr::SolrDocumentBehavior

  # .unique_key = 'id'

  # Email uses the semantic field mappings below to generate the body of an email.
  SolrDocument.use_extension(Blacklight::Document::Email)

  # SMS uses the semantic field mappings below to generate the body of an SMS email.
  SolrDocument.use_extension(Blacklight::Document::Sms)

  # DublinCore uses the semantic field mappings below to assemble an OAI-compliant Dublin Core document
  # Semantic mappings of solr stored fields. Fields may be multi or
  # single valued. See Blacklight::Document::SemanticFields#field_semantics
  # and Blacklight::Document::SemanticFields#to_semantic_values
  # Recommendation: Use field names from Dublin Core
  use_extension(Blacklight::Document::DublinCore)

  # This fails to load.
  # use_extension(::ScholarsArchive::Document::QualifiedDublinCore)

  # Do content negotiation for AF models.

  use_extension( Hydra::ContentNegotiation )

  # Defines one reader method per property name that returns the value of
  # the corresponding solrized field from this document.
  def self.solrized_methods(property_names)
    property_names.each do |property_name|
      define_method property_name.to_sym do
        self[Solrizer.solr_name(property_name)]
      end
    end
  end

  # Hand-written stored-field readers (each returns the raw stored value).

  def academic_affiliation_label
    # references to ScholarsArchive raise ActionView::Template::Error (uninitialized constant SolrDocument::ScholarsArchive)
    # ScholarsArchive::LabelParserService.parse_label_uris(self['academic_affiliation_label_ssim'])
    self['academic_affiliation_label_ssim']
  end

  def curation_notes_admin_label
    self['curation_notes_admin_label_ssim']
  end

  def curation_notes_user_label
    self['curation_notes_user_label_ssim']
  end

  def date_published_label
    self['date_published_dtsim']
  end

  def degree_field_label
    # references to ScholarsArchive raise ActionView::Template::Error (uninitialized constant SolrDocument::ScholarsArchive)
    # ScholarsArchive::LabelParserService.parse_label_uris(self['degree_field_label_ssim'])
    self['degree_field_label_ssim']
  end

  def degree_grantors_label
    # references to ScholarsArchive raise ActionView::Template::Error (uninitialized constant SolrDocument::ScholarsArchive)
    # ScholarsArchive::LabelParserService.parse_label_uris(self['degree_grantors_label_ssim'])
    self['degree_grantors_label_ssim']
  end

  def doi_label
    self['doi_label_ssim']
  end

  def access_deepblue_label
    self['access_deepblue_label_ssim']
  end

  def fundedby_label
    self['fundedby_label_ssim']
  end

  def fundedby_other_label
    self['fundedby_other_label_ssim']
  end

  def grantnumber_label
    self['grantnumber_label_ssim']
  end

  def referenced_by_label
    self['referenced_by_label_ssim']
  end

  def language_label
    self['language_label_ssim']
  end

  def license_label
    self['license_label_ssim']
  end

  def methodology_label
    self['methodology_label_ssim']
  end

  def nested_geo
    self[Solrizer.solr_name('nested_geo_label', :symbol)] || []
  end

  def nested_related_items_label
    # references to ScholarsArchive raise ActionView::Template::Error (uninitialized constant SolrDocument::ScholarsArchive)
    # ScholarsArchive::LabelParserService.parse_label_uris(self[Solrizer.solr_name('nested_related_items_label', :symbol)]) || []
    self[Solrizer.solr_name('nested_related_items_label', :symbol)] || []
  end

  def other_affiliation_label
    # references to ScholarsArchive raise ActionView::Template::Error (uninitialized constant SolrDocument::ScholarsArchive)
    # ScholarsArchive::LabelParserService.parse_label_uris(self['other_affiliation_label_ssim'])
    self['other_affiliation_label_ssim']
  end

  def peerreviewed_label
    self['peerreviewed_label_ssim']
  end

  def prior_identifier_label
    self['prior_identifier_label_ssim']
  end

  def rights_license_label
    self['rights_license_label_ssim']
  end

  def rights_license_other_label
    self['rights_license_other_label_ssim']
  end

  def rights_statement_label
    self['rights_statement_label_ssim']
  end

  def subject_discipline_label
    self['subject_discipline_label_ssim']
  end

  # Parsed creation timestamp of the underlying record.
  def system_created
    Time.parse self['system_create_dtsi']
  end

  def geo_location_place
    self['geo_location_place_tesim'] || []
  end

  def geo_location_box
    self['geo_location_box_tesim'] || []
  end

  def license_other
    self['license_other_tesim'] || []
  end

  # Bulk-generate readers for the remaining stored properties.
  solrized_methods [
    'abstract',
    'academic_affiliation',
    'additional_information',
    'description_abstract',
    'language_none',
    'peerreviewed',
    'alt_title',
    'bibliographic_citation',
    'conference_location',
    'conference_name',
    'conference_section',
    'contributor_advisor',
    'contributor_affiliationumcampus',
    'contributor_author',
    'contributor_committeemember',
    'curation_notes_admin',
    'curation_notes_user',
    'date_accepted',
    'date_available',
    'date_collected',
    'date_copyright',
    'date_issued',
    'date_published',
    'date_reviewed',
    'date_submitted',
    'date_valid',
    'degree_discipline',
    'degree_field',
    'degree_grantors',
    'degree_level',
    'degree_name',
    'description_mapping',
    'description_sponsorship',
    'description_thesisdegreediscipline',
    'description_thesisdegreegrantor',
    'description_thesisdegreename',
    'digitization_spec',
    'doi',
    'dspace_collection',
    'dspace_community',
    'duration',
    'editor',
    'embargo_reason',
    'file_extent',
    'file_format',
    'fundedby',
    'fundedby_other',
    'funding_body',
    'funding_statement',
    'graduation_year',
    'grantnumber',
    'has_journal',
    'has_number',
    'has_volume',
    'hydrologic_unit_code',
    'identifier',
    'identifier_orcid',
    'identifier_source',
    'identifier_uniqname',
    'in_series',
    'interactivity_type',
    'is_based_on_url',
    'referenced_by',
    'isbn',
    'issn',
    'language',
    'learning_resource_type',
    'methodology',
    'other_affiliation',
    'prior_identifier',
    'relation_ispartofseries',
    'replaces',
    'rights_license',
    'rights_license_other',
    'subject_discipline',
    'subject_other',
    'tableofcontents',
    'time_required',
    'type_none',
    'typical_age_range',
    'virus_scan_service',
    'virus_scan_status',
    'virus_scan_status_date'
  ]

  # Map Dublin Core semantic names onto stored/virtual fields for OAI.
  field_semantics.merge!(
    contributor: [ 'contributor_tesim',
                   'editor_tesim',
                   'contributor_advisor_tesim',
                   'contributor_committeemember_tesim',
                   'oai_academic_affiliation_label',
                   'oai_other_affiliation_label' ],
    coverage: ['based_near_label_tesim', 'conferenceLocation_tesim'],
    creator: 'creator_tesim',
    date: 'date_created_tesim',
    description: ['description_tesim', 'abstract_tesim'],
    format: ['file_extent_tesim', 'file_format_tesim'],
    identifier: 'oai_identifier',
    language: 'language_label_tesim',
    publisher: 'publisher_tesim',
    relation: 'oai_nested_related_items_label',
    rights: 'oai_rights',
    source: ['source_tesim', 'isBasedOnUrl_tesim'],
    subject: ['subject_tesim', 'keyword_tesim'],
    title: 'title_tesim',
    type: 'resource_type_tesim'
  )


  # Override SolrDocument hash access for certain virtual fields
  def [](key)
    return send(key) if [ 'oai_academic_affiliation_label',
                          'oai_other_affiliation_label',
                          'oai_rights',
                          'oai_identifier',
                          'oai_nested_related_items_label' ].include?(key)
    super
  end

  # OAI sets derived from the document's isPartOf membership.
  def sets
    fetch('isPartOf', []).map { |m| BlacklightOaiProvider::Set.new("isPartOf_ssim:#{m}") }
  end

  # "label: uri" strings for each nested related item.
  def oai_nested_related_items_label
    related_items = []
    nested_related_items_label&.each do |r|
      related_items << r["label"] + ': ' + r["uri"]
    end
    related_items
  end

  # Label strings for each academic affiliation.
  def oai_academic_affiliation_label
    aa_labels = []
    academic_affiliation_label&.each do |a|
      aa_labels << a["label"]
    end
    aa_labels
  end

  # Label strings for each other affiliation.
  def oai_other_affiliation_label
    oa_labels = []
    other_affiliation_label&.each do |o|
      oa_labels << o["label"]
    end
    oa_labels
  end

  # Only return License if present, otherwise Rights
  def oai_rights
    license_label ? license_label : rights_statement_label
  end

  # Full show URL for this record, routed by model type (Collection vs work).
  def oai_identifier
    if self["has_model_ssim"].first.to_s == "Collection"
      Hyrax::Engine.routes.url_helpers.url_for( only_path: false,
                                                action: 'show',
                                                host: CatalogController.blacklight_config.oai[:provider][:repository_url],
                                                controller: 'hyrax/collections',
                                                id: id )
    else
      Rails.application.routes.url_helpers.url_for( only_path: false,
                                                    action: 'show',
                                                    host: CatalogController.blacklight_config.oai[:provider][:repository_url],
                                                    controller: 'hyrax/' + self["has_model_ssim"].first.to_s.underscore.pluralize,
                                                    id: id )
    end
  end

end
-
1
# Repository user account: Hydra/Hyrax user behaviors plus deployment-
# dependent Devise authentication (IU CAS/LDAP vs local database auth).
class User < ApplicationRecord
  # Connects this user object to Hydra behaviors.
  include Hydra::User
  # Connects this user object to Role-management behaviors.
  include Hydra::RoleManagement::UserRoles
  # Connects this user object to Hyrax behaviors.
  include Hyrax::User
  include Hyrax::UserUsageStats

  # Use the http header as auth. This app will be behind a reverse proxy
  # that will take care of the authentication.
  Devise.add_module(:http_header_authenticatable,
                    strategy: true,
                    controller: :sessions,
                    model: 'devise/models/http_header_authenticatable')

  # IU deployments authenticate via CAS and look up groups in LDAP;
  # all other deployments use local database authentication.
  if Rails.configuration.authentication_method == "iu"
    devise :omniauthable, :omniauth_providers => [:cas]
    alias_attribute :ldap_lookup_key, :uid
    include LDAPGroupsLookup::Behavior
  else
    devise :database_authenticatable, :registerable,
           :recoverable, :rememberable, :trackable, :validatable
  end
  if Blacklight::Utils.needs_attr_accessible?
    attr_accessible :email, :password, :password_confirmation
  end
  # Connects this user object to Blacklights Bookmarks.
  include Blacklight::User
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable

  # Method added by Blacklight; Blacklight uses #to_s on your
  # user class to get a user-displayable login/identifier for
  # the account.
  def to_s
    email
  end

  # helper for IU auth
  # Finds or creates the local user record matching an OmniAuth CAS payload,
  # deriving the email from the IU uid and assigning a random password.
  def self.find_for_iu_cas(auth)
    where(provider: auth.provider, uid: auth.uid).first_or_create! do |user|
      user.provider = auth.provider
      user.uid = auth.uid
      user.email = [auth.uid,'@iu.edu'].join
      user.encrypted_password = Devise.friendly_token[0,20]
    end
  end

end
-
# frozen_string_literal: true
-
-
1
module Datacore
  # Mixin for file sets whose content lives in the SDA archive instead of
  # Fedora. Such files are flagged by a `message/external-body` mime type
  # that embeds the archive request URL.
  module PresentsArchiveFile

    # archive files bypass fedora storage
    def archive_file?
      mime_type.to_s.match(/^message\/external-body\;.*access-type=URL/).present?
    end

    # needs to pass through archive_file in case any / to %2F encoding happened there
    # NOTE(review): the early `return '/'` exits the method from inside the
    # memoized begin block, so the non-archive case is never memoized —
    # confirm this is intentional.
    def archive_request_url
      @archive_request_url ||= begin
        return '/' unless archive_file?
        archive_file.send(:request_url)
      end
    end

    # Status endpoint derived from the request endpoint.
    def archive_status_url
      archive_request_url.sub('/request/', '/status/')
    end

    # Builds an ArchiveFile from the collection/object encoded in the mime
    # type; nil (and not memoized) when this is not an archive file.
    def archive_file
      @archive_file ||=
        if archive_file?
          # nested objects may be stored in format:
          # /sda/request/<collection>/<subdir>%2F<object>
          # which works rails routing, but we need to force-encode the final '/' if stored as:
          # /sda/request/<collection>/<subdir>/<object>
          collection_and_object = mime_type.split('"').last.sub('/sda/request/', '').split('/')
          collection = collection_and_object.first
          object = collection_and_object[1, collection_and_object.size].join('%2F')
          ArchiveFile.new(collection: collection, object: object)
        end
    end
    delegate :display_status, :request_action, :request_actionable?, :request_for_staging?, :status_in_ui, to: :archive_file, allow_nil: true
    alias_method :archive_status_description, :display_status
    alias_method :archive_status_code, :status_in_ui

    # Whether the UI should offer an email field with archive requests.
    def provide_request_email?
      Settings.archive_api.provide_email.present?
    end

    # Whether that email field is mandatory.
    def require_request_email?
      Settings.archive_api.provide_email == :required
    end
  end
end
-
# frozen_string_literal: true
-
-
1
# Minimal presenter that carries the rendering controller into views that
# display guest-user messaging.
class GuestUserMessagePresenter

  attr_accessor :controller

  # @param controller the controller rendering the guest-user message
  def initialize( controller: )
    @controller = controller
  end

end
-
# frozen_string_literal: true
-
-
1
module Hyrax

  # TODO: monkey patch this to only override and provide necessary behavior
  #
  # Presenter concern exposing FITS characterization metadata (plus the
  # admin-only virus-scan fields), all delegated to the solr document.
  module CharacterizationBehavior
    extend ActiveSupport::Concern

    class_methods do

      # Characterization terms shown to all users.
      # (fix: :height was accidentally listed twice in the original;
      # the duplicate has been removed)
      def characterization_terms
        [
          :byte_order,
          :compression,
          :height,
          :width,
          :color_space,
          :profile_name,
          :profile_version,
          :orientation,
          :color_map,
          :image_producer,
          :capture_device,
          :scanning_software,
          :gps_timestamp,
          :latitude,
          :longitude,
          :file_format,
          :file_title,
          :page_count,
          :duration,
          :sample_rate,
          :format_label,
          # :file_size, # replace this with
          :file_size_human_readable, # replaces file size
          :filename,
          :well_formed,
          :last_modified,
          :original_checksum, # TODO: revisit this...
          :mime_type
        ]
      end

      # Terms visible only to administrators.
      def characterization_terms_admin_only
        %i[
          virus_scan_service
          virus_scan_status
          virus_scan_status_date
        ]
      end

    end

    included do
      delegate( *characterization_terms, to: :solr_document )
      delegate( *characterization_terms_admin_only, to: :solr_document )
    end

    # True when at least one characterization value is present.
    def characterized?
      !characterization_metadata.values.compact.empty?
    end

    def characterization_metadata
      @characterization_metadata ||= build_characterization_metadata
    end

    def characterization_metadata_admin_only
      @characterization_metadata_admin_only ||= build_characterization_metadata_admin_only
    end

    # Override this if you want to inject additional characterization metadata
    # Use a hash of key/value pairs where the value is an Array or String
    # {
    #   term1: ["value"],
    #   term2: ["value1", "value2"],
    #   term3: "a string"
    # }
    def additional_characterization_metadata
      @additional_characterization_metadata ||= {}
    end

    def additional_characterization_metadata_admin_only
      @additional_characterization_metadata_admin_only ||= {}
    end

    # Human-readable label for a term, from I18n when a translation exists,
    # otherwise the titleized term name.
    def label_for_term( term )
      MsgHelper.t( "show.file_set.label.#{term}", raise: true )
    rescue I18n::MissingTranslationData => e
      term.to_s.titleize
    end

    # Returns an array of characterization values truncated to 250 characters limited
    # to the maximum number of configured values.
    # @param [Symbol] term found in the characterization_metadata hash
    # @return [Array] of truncated values
    def primary_characterization_values( term )
      values = values_for( term )
      values.slice!(Hyrax.config.fits_message_length, (values.length - Hyrax.config.fits_message_length))
      truncate_all(values)
    end

    # Returns an array of characterization values truncated to 250 characters limited
    # to the maximum number of configured values.
    # @param [Symbol] term found in the characterization_metadata hash
    # @return [Array] of truncated values
    def primary_characterization_values_admin_only( term )
      values = values_for_admin_only( term )
      values.slice!(Hyrax.config.fits_message_length, (values.length - Hyrax.config.fits_message_length))
      truncate_all(values)
    end

    # Returns an array of characterization values truncated to 250 characters that are in
    # excess of the maximum number of configured values.
    # @param [Symbol] term found in the characterization_metadata hash
    # @return [Array] of truncated values
    def secondary_characterization_values(term)
      values = values_for(term)
      additional_values = values.slice(Hyrax.config.fits_message_length, values.length - Hyrax.config.fits_message_length)
      return [] unless additional_values
      truncate_all(additional_values)
    end

    # Returns an array of characterization values truncated to 250 characters that are in
    # excess of the maximum number of configured values.
    # @param [Symbol] term found in the characterization_metadata hash
    # @return [Array] of truncated values
    def secondary_characterization_values_admin_only(term)
      values = values_for_admin_only(term)
      additional_values = values.slice(Hyrax.config.fits_message_length, values.length - Hyrax.config.fits_message_length)
      return [] unless additional_values
      truncate_all(additional_values)
    end

    private

    def values_for( term )
      Array.wrap( characterization_metadata[term] )
    end

    def values_for_admin_only( term )
      Array.wrap( characterization_metadata_admin_only[term] )
    end

    def truncate_all(values)
      values.map { |v| v.to_s.truncate(250) }
    end

    # Collects each term's value from the delegated readers, skipping blanks.
    def build_characterization_metadata
      self.class.characterization_terms.each do |term|
        value = send(term)
        additional_characterization_metadata[term.to_sym] = value if value.present?
      end
      additional_characterization_metadata
    end

    def build_characterization_metadata_admin_only
      self.class.characterization_terms_admin_only.each do |term|
        value = send(term)
        additional_characterization_metadata_admin_only[term.to_sym] = value if value.present?
      end
      additional_characterization_metadata_admin_only
    end

  end

end
-
# frozen_string_literal: true
-
-
1
module Hyrax

  # Presenter backing collection show/dashboard views: wraps a collection's
  # SolrDocument together with the current ability and request context.
  class CollectionPresenter
    include ModelProxy
    include PresentsAttributes
    include ActionView::Helpers::NumberHelper
    include ::Hyrax::BrandingHelper
    attr_accessor :solr_document, :current_ability, :request
    attr_reader :subcollection_count
    attr_accessor :parent_collections # This is expected to be a Blacklight::Solr::Response with all of the parent collections
    attr_writer :collection_type

    class_attribute :create_work_presenter_class
    self.create_work_presenter_class = Hyrax::SelectTypeListPresenter

    # @param [SolrDocument] solr_document
    # @param [Ability] current_ability
    # @param [ActionDispatch::Request] request the http request context
    def initialize(solr_document, current_ability, request = nil)
      @solr_document = solr_document
      @current_ability = current_ability
      @request = request
      @subcollection_count = 0
    end

    # CurationConcern methods
    delegate :stringify_keys, :human_readable_type, :collection?, :representative_id,
             :to_s, to: :solr_document

    delegate(*Hyrax::CollectionType.collection_type_settings_methods, to: :collection_type, prefix: :collection_type_is)
-
-
1
# Memoized CollectionType looked up from the document's type gid.
def collection_type
  @collection_type ||= Hyrax::CollectionType.find_by_gid!(collection_type_gid)
end
-
-
# Metadata Methods
-
1
delegate :title, :description, :creator, :contributor, :subject, :publisher, :keyword, :language, :embargo_release_date,
         :lease_expiration_date, :license, :date_created, :resource_type, :based_near, :related_url, :identifier, :thumbnail_path,
         :title_or_label, :collection_type_gid, :create_date, :modified_date, :visibility, :edit_groups, :edit_people,
         to: :solr_document

# Terms is the list of fields displayed by
# app/views/collections/_show_descriptions.html.erb
def self.terms
  [:total_items, :size, :resource_type, :creator, :contributor, :keyword, :license, :publisher, :date_created, :subject,
   :language, :identifier, :based_near, :related_url]
end

# The subset of terms that have a value for this collection.
def terms_with_values
  self.class.terms.select { |t| self[t].present? }
end

# Hash-style access: :size and :total_items are computed here; everything
# else is read from the solr document.
def [](key)
  case key
  when :size
    size
  when :total_items
    total_items
  else
    solr_document.send key
  end
end
-
-
# begin display_provenance_log
-
-
1
# Provenance log display is always enabled for collections.
def display_provenance_log_enabled?
  true
end

# True when a provenance log file exists on disk for this collection.
def provenance_log_entries?
  file_path = Deepblue::ProvenancePath.path_for_reference( id )
  File.exist? file_path
end

# end display_provenance_log

# Configured relative URL root, or '' when unset.
def relative_url_root
  rv = ::DeepBlueDocs::Application.config.relative_url_root
  return rv if rv
  ''
end
-
-
1
# Human-readable total byte size of the collection's contents.
def size
  number_to_human_size(@solr_document['bytes_lts'])
end

# Count of all members, regardless of the current user's access.
def total_items
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id}").count
end

# Count of members the current user can access.
def total_viewable_items
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id}").accessible_by(current_ability).count
end

# Count of member Works the current user can access (with debug logging).
def total_viewable_works
  ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                         ::Deepblue::LoggingHelper.called_from,
                                         "id=#{id}",
                                         "current_ability=#{current_ability}",
                                         "" ]
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id} AND generic_type_sim:Work").accessible_by(current_ability).count
end

# Count of member sub-collections the current user can access.
def total_viewable_collections
  ActiveFedora::Base.where("member_of_collection_ids_ssim:#{id} AND generic_type_sim:Collection").accessible_by(current_ability).count
end

# Badge text: the collection type's title.
def collection_type_badge
  collection_type.title
end
-
-
# The total number of parents that this collection belongs to, visible or not.
-
1
def total_parent_collections
-
parent_collections.nil? ? 0 : parent_collections.response['numFound']
-
end
-
-
# The number of parent collections shown on the current page. This will differ from total_parent_collections
-
# due to pagination.
-
1
def parent_collection_count
-
parent_collections.nil? ? 0 : parent_collections.documents.size
-
end
-
-
1
def user_can_nest_collection?
-
current_ability.can?(:deposit, solr_document)
-
end
-
-
1
def user_can_create_new_nest_collection?
-
current_ability.can?(:create_collection_of_type, collection_type)
-
end
-
-
1
def show_path
-
Hyrax::Engine.routes.url_helpers.dashboard_collection_path(id)
-
end
-
-
1
def banner_file
-
branding_banner_file( id: id )
-
end
-
-
1
def logo_record
-
branding_logo_record( id: id )
-
end
-
-
# A presenter for selecting a work type to create
-
# this is needed here because the selector is in the header on every page
-
1
def create_work_presenter
-
@create_work_presenter ||= create_work_presenter_class.new(current_ability.current_user)
-
end
-
-
1
def create_many_work_types?
-
if Flipflop.only_use_data_set_work_type?
-
false
-
else
-
create_work_presenter.many?
-
end
-
end
-
-
1
def draw_select_work_modal?
-
create_many_work_types?
-
end
-
-
1
def first_work_type
-
create_work_presenter.first_model
-
end
-
-
1
def available_parent_collections(scope:)
-
return @available_parents if @available_parents.present?
-
collection = Collection.find(id)
-
colls = Hyrax::Collections::NestedCollectionQueryService.available_parent_collections(child: collection, scope: scope, limit_to_id: nil)
-
@available_parents = colls.map do |col|
-
{ "id" => col.id, "title_first" => col.title.first }
-
end
-
@available_parents.to_json
-
end
-
-
1
def subcollection_count=(total)
-
@subcollection_count = total unless total.nil?
-
end
-
-
# For the Managed Collections tab, determine the label to use for the level of access the user has for this admin set.
-
# Checks from most permissive to most restrictive.
-
# @return String the access label (e.g. Manage, Deposit, View)
-
1
def managed_access
-
return I18n.t('hyrax.dashboard.my.collection_list.managed_access.manage') if current_ability.can?(:edit, solr_document)
-
return I18n.t('hyrax.dashboard.my.collection_list.managed_access.deposit') if current_ability.can?(:deposit, solr_document)
-
return I18n.t('hyrax.dashboard.my.collection_list.managed_access.view') if current_ability.can?(:read, solr_document)
-
''
-
end
-
-
# Determine if the user can perform batch operations on this collection. Currently, the only
-
# batch operation allowed is deleting, so this is equivalent to checking if the user can delete
-
# the collection determined by criteria...
-
# * user must be able to edit the collection to be able to delete it
-
# * the collection does not have to be empty
-
# @return Boolean true if the user can perform batch actions; otherwise, false
-
1
def allow_batch?
-
return true if current_ability.can?(:edit, solr_document)
-
false
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
-
1
class DataSetPresenter < DeepbluePresenter
-
-
1
delegate :authoremail,
-
:curation_notes_admin,
-
:curation_notes_user,
-
:date_coverage,
-
:date_published, :date_published2, # FIXME; investigate
-
:doi, :doi_the_correct_one, # FIXME: investigate
-
:doi_minted?,
-
:doi_minting_enabled?,
-
:doi_pending?,
-
:fundedby,
-
:fundedby_other,
-
:grantnumber,
-
:methodology,
-
:prior_identifier,
-
:referenced_by,
-
:rights_license,
-
:rights_license_other,
-
:subject_discipline,
-
:total_file_size,
-
:access_deepblue,
-
:geo_location_place,
-
:geo_location_box,
-
:license_other,
-
:academic_affiliation,
-
:alt_title,
-
:bibliographic_citation,
-
:contributor_affiliationumcampus,
-
:date_attribute,
-
:date_issued,
-
:description_abstract,
-
:description_mapping,
-
:description_sponsorship,
-
:external_link,
-
:identifier_orcid,
-
:identifier_source,
-
:language_none,
-
:linked,
-
:other_affiliation,
-
:peerreviewed,
-
:relation_ispartofseries,
-
:type_none,
-
to: :solr_document
-
-
# def initialize( solr_document, current_ability, request = nil )
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "solr_document = #{solr_document}",
-
# "solr_document.class.name = #{solr_document.class.name}",
-
# "current_ability = #{current_ability}",
-
# "request = #{request}",
-
# "" ]
-
# super( solr_document, current_ability, request )
-
# ::Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
# Deepblue::LoggingHelper.called_from,
-
# Deepblue::LoggingHelper.obj_class( 'class', self ),
-
# "@solr_document.class.name = #{@solr_document.class.name}",
-
# "@solr_document.doi = #{@solr_document.doi}",
-
# "@solr_document.doi_the_correct_one = #{@solr_document.doi_the_correct_one}",
-
# "@solr_document.doi_minted? = #{@solr_document.doi_minted?}",
-
# "@solr_document.doi_minting_enabled? = #{@solr_document.doi_minting_enabled?}",
-
# "@solr_document.doi_pending? = #{@solr_document.doi_pending?}",
-
# "" ]
-
# end
-
-
# begin box
-
-
1
def box_enabled?
-
DeepBlueDocs::Application.config.box_integration_enabled
-
end
-
-
1
def box_link( only_if_exists_in_box: false )
-
return nil unless box_enabled?
-
concern_id = @solr_document.id
-
return ::BoxHelper.box_link( concern_id, only_if_exists_in_box: only_if_exists_in_box )
-
end
-
-
1
def box_link_display_for_work?( current_user )
-
return false unless box_enabled?
-
rv = ::BoxHelper.box_link_display_for_work?( work_id: @solr_document.id,
-
work_file_count: total_file_count,
-
is_admin: current_ability.admin?,
-
user_email: Deepblue::EmailHelper.user_email_from( current_user ) )
-
return rv
-
end
-
-
# end box
-
-
# display date range as from_date To to_date
-
1
def date_coverage
-
solr_value = @solr_document.date_coverage
-
return nil if solr_value.blank?
-
return solr_value.sub( "/open", "" ) if solr_value.match "/open" # rubocop:disable Performance/RedundantMatch, Performance/RegexpMatch
-
solr_value.sub( "/", " to " )
-
end
-
-
# begin display_provenance_log
-
-
1
def display_provenance_log_enabled?
-
true
-
end
-
-
1
def provenance_log_entries?
-
file_path = Deepblue::ProvenancePath.path_for_reference( id )
-
File.exist? file_path
-
end
-
-
# end display_provenance_log
-
-
# begin doi
-
#
-
# def doi
-
# solr_value = @solr_document[Solrizer.solr_name('doi', :symbol)]
-
# return nil if solr_value.blank?
-
# solr_value.first
-
# end
-
#
-
# def doi_minted?
-
# !doi.nil?
-
# rescue
-
# nil
-
# end
-
#
-
# def doi_pending?
-
# doi == ::Deepblue::DoiBehavior::DOI_PENDING
-
# end
-
#
-
# def mint_doi_enabled?
-
# true
-
# end
-
#
-
# end doi
-
-
# begin globus
-
-
1
def globus_download_enabled?
-
DeepBlueDocs::Application.config.globus_enabled
-
end
-
-
1
def globus_enabled?
-
DeepBlueDocs::Application.config.globus_enabled
-
end
-
-
1
def globus_external_url
-
concern_id = @solr_document.id
-
::GlobusJob.external_url concern_id
-
end
-
-
1
def globus_files_available?
-
concern_id = @solr_document.id
-
::GlobusJob.files_available? concern_id
-
end
-
-
1
def globus_files_prepping?
-
concern_id = @solr_document.id
-
::GlobusJob.files_prepping? concern_id
-
end
-
-
1
def globus_last_error_msg
-
concern_id = @solr_document.id
-
::GlobusJob.error_file_contents concern_id
-
end
-
-
# end globus
-
-
1
def hdl
-
# @object_profile[:hdl]
-
end
-
-
1
def human_readable( value )
-
ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value, precision: 3 )
-
end
-
-
1
def label_with_total_file_size( label )
-
total = total_file_size
-
return label if total.zero?
-
count = total_file_count
-
files = 1 == count ? 'file' : 'files'
-
"#{label} (#{total_file_size_human_readable} in #{count} #{files})"
-
end
-
-
# begin tombstone
-
-
1
def tombstone
-
return nil if @solr_document.blank?
-
solr_value = @solr_document[Solrizer.solr_name('tombstone', :symbol)]
-
return nil if solr_value.blank?
-
solr_value.first
-
end
-
-
1
def tombstone_enabled?
-
true
-
end
-
-
# end tombstone
-
-
1
def total_file_count
-
solr_value = @solr_document[Solrizer.solr_name('file_set_ids', :symbol)]
-
return 0 if solr_value.blank?
-
solr_value.size
-
end
-
-
1
def total_file_size
-
solr_value = @solr_document[Solrizer.solr_name('total_file_size', Hyrax::FileSetIndexer::STORED_LONG)]
-
return 0 if solr_value.blank?
-
solr_value
-
end
-
-
1
def total_file_size_human_readable
-
human_readable( total_file_size )
-
end
-
-
1
def zip_download_enabled?
-
Settings.zip_download_enabled
-
end
-
end
-
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
-
1
class DeepbluePresenter < Hyrax::WorkShowPresenter
-
-
1
def box_enabled?
-
false
-
end
-
-
1
def display_provenance_log_enabled?
-
false
-
end
-
-
1
def doi_minting_enabled?
-
false
-
end
-
-
1
def globus_download_enabled?
-
false
-
end
-
-
1
def human_readable_type
-
1
"Work"
-
end
-
-
# def mint_doi_enabled?
-
# false
-
# end
-
-
# def tombstone_enabled?
-
# false
-
# end
-
-
1
def zip_download_enabled?
-
false
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
1
class DissertationPresenter < DeepbluePresenter
-
end
-
end
-
# frozen_string_literal: true
-
-
1
module Hyrax
-
1
class DsFileSetPresenter < Hyrax::FileSetPresenter
-
1
include ::Datacore::PresentsArchiveFile
-
-
1
delegate :doi, :doi_the_correct_one,
-
:doi_minted?,
-
:doi_minting_enabled?,
-
:doi_pending?,
-
:file_size,
-
:file_size_human_readable,
-
:original_checksum,
-
:mime_type,
-
:title,
-
:virus_scan_service,
-
:virus_scan_status,
-
:virus_scan_status_date, to: :solr_document
-
-
# def doi_minted?
-
# # the first time this is called, doi will not be in solr.
-
# @solr_document[ Solrizer.solr_name( 'doi', :symbol ) ].first
-
# rescue
-
# nil
-
# end
-
#
-
# def doi_pending?
-
# @solr_document[ Solrizer.solr_name( 'doi', :symbol ) ].first == ::Deepblue::DoiBehavior::DOI_PENDING
-
# end
-
-
1
def relative_url_root
-
rv = ::DeepBlueDocs::Application.config.relative_url_root
-
return rv if rv
-
''
-
end
-
-
1
def parent_doi_minted?
-
g = DataSet.find parent.id
-
g.doi_minted?
-
end
-
-
# begin display_provenance_log
-
-
1
def display_provenance_log_enabled?
-
true
-
end
-
-
1
def provenance_log_entries?
-
file_path = Deepblue::ProvenancePath.path_for_reference( id )
-
File.exist? file_path
-
end
-
-
# end display_provenance_log
-
-
1
def parent_public?
-
g = DataSet.find parent.id
-
g.public?
-
end
-
-
1
def first_title
-
title.first || 'File'
-
end
-
-
# To handle large files.
-
1
def link_name
-
if ( current_ability.admin? || current_ability.can?(:read, id) )
-
first_title
-
else
-
'File'
-
end
-
end
-
-
1
def file_name( parent_presenter, link_to )
-
if parent_presenter.tombstone.present?
-
rv = link_name
-
elsif file_size_too_large_to_download?
-
rv = link_name
-
else
-
rv = link_to
-
end
-
return rv
-
end
-
-
1
def file_size_too_large_to_download?
-
!@solr_document.file_size.nil? && @solr_document.file_size >= DeepBlueDocs::Application.config.max_work_file_size_to_download
-
end
-
end
-
end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
module Hyrax
-
1
class GenericWorkPresenter < Hyrax::WorkShowPresenter
-
-
1
delegate :identifier_orcid, :academic_affiliation, :other_affiliation, :contributor_affiliationumcampus, :alt_title, :date_issued, :identifier_source, :peerreviewed, :bibliographic_citation, :relation_ispartofseries, :rights_statement, :type_none, :language_none, :description_mapping, :description_abstract, :description_sponsorship, :description, to: :solr_document
-
-
-
end
-
end
-
# frozen_string_literal: true
-
-
1
require File.join(Gem::Specification.find_by_name("hyrax").full_gem_path, "app/presenters/hyrax/work_show_presenter.rb")
-
-
# monkey patch Hyrax::WorkShowPresenter
-
1
module Hyrax
-
-
1
class WorkShowPresenter
-
-
1
def relative_url_root
-
1
rv = ::DeepBlueDocs::Application.config.relative_url_root
-
1
return rv if rv
-
1
''
-
end
-
-
1
def page_title
-
1
part1 = human_readable_type
-
1
part1 = "Data Set" if part1 == "Work"
-
1
"#{part1} | #{title.first} | ID: #{id} | #{I18n.t('hyrax.product_name')}"
-
end
-
-
1
def tombstone
-
solr_value = @solr_document[Solrizer.solr_name('tombstone', :symbol)]
-
return nil if solr_value.blank?
-
solr_value.first
-
end
-
-
1
def tombstone_enabled?
-
true
-
end
-
-
end
-
-
end
-
1
require 'date'
-
1
require 'edtf'
-
-
1
module Dataset
-
1
class DateCoverageService
-
-
# EDTF's notation for interval begin and end when unspecified.
-
1
EDTF_OPEN_BEGIN = :unknown
-
1
EDTF_OPEN_END = :open
-
-
1
class << self
-
# Given a parameters hash with date coverage keys
-
# Returns an EDTF Interval or nil.
-
1
def params_to_interval( params )
-
7
begin_date = make_begin_date(
-
year: safe_to_i(params[:date_coverage_begin_year]),
-
month: safe_to_i(params[:date_coverage_begin_month]),
-
day: safe_to_i(params[:date_coverage_begin_day])
-
)
-
7
end_date = make_end_date(
-
year: safe_to_i(params[:date_coverage_end_year]),
-
month: safe_to_i(params[:date_coverage_end_month]),
-
day: safe_to_i(params[:date_coverage_end_day])
-
)
-
7
interv = EDTF::Interval.new(begin_date, end_date)
-
7
interv unless interval_reversed?(interv) || interval_both_ends_open?(interv)
-
end
-
-
# Given an EDTF Interval
-
# Returns a corresponding hash of parameters for an edit form.
-
1
def interval_to_params( interv )
-
3
return unless interv.is_a?(EDTF::Interval)
-
2
return if interval_reversed?(interv)
-
1
make_begin_params(interv.begin).merge(make_end_params(interv.end))
-
end
-
-
1
private
-
-
# Determines if an intervals endings are reverse. Specifically, where the begin
-
# date is later that the end date. An interval with reversed starting and ending
-
# dates will have from and to attributes, but begin and end will be nil
-
# == Parameters:
-
# interv::
-
# [EDTF::Interval] The interval to be tested.
-
# == Returns:
-
# [Boolean] True if the interval boundaries were reversed
-
1
def interval_reversed?(interv)
-
9
return false if interv.from == EDTF_OPEN_BEGIN || interv.to == EDTF_OPEN_END
-
6
(interv.from && !interv.begin) && (interv.to && !interv.end)
-
end
-
-
1
def interval_both_ends_open?(interv)
-
6
interv.from == EDTF_OPEN_BEGIN && interv.to == EDTF_OPEN_END
-
end
-
-
1
def make_begin_params(date)
-
1
hsh = date_to_params date
-
1
{date_coverage_begin_year: hsh[:year],
-
date_coverage_begin_month: hsh[:month],
-
date_coverage_begin_day: hsh[:day]}
-
end
-
-
1
def make_end_params( date )
-
1
hsh = date_to_params date
-
1
{date_coverage_end_year: hsh[:year],
-
date_coverage_end_month: hsh[:month],
-
date_coverage_end_day: hsh[:day]}
-
end
-
-
# Given edtf date
-
# return hash with possible keys :year, :month, :day
-
1
def date_to_params( date )
-
2
return {} unless date
-
2
hsh = case date.precision
-
when :year
-
{year: date.year}
-
when :month
-
{year: date.year, month: date.month}
-
when :day
-
2
{year: date.year, month: date.month, day: date.day}
-
end
-
2
hsh[:year] = year_to_s hsh[:year]
-
8
hsh.each{|k,v| hsh[k] = v.to_s}
-
end
-
-
1
def year_to_s(val)
-
2
str = val.abs.to_s.rjust(4,"0")
-
2
val < 0 ? "-" + str : str
-
end
-
-
# Given input
-
# Return the corresponding integer or nil
-
1
def safe_to_i(input)
-
60
Integer(input,10) rescue nil
-
end
-
-
# Until 8601-2 gets finalized, edtf ranges do not accept :open for begin.
-
1
def make_begin_date(year: nil, month: nil, day: nil)
-
7
make_date(year: year, month: month, day: day) || EDTF_OPEN_BEGIN
-
end
-
-
1
def make_end_date(year: nil, month: nil, day: nil)
-
7
make_date(year: year, month: month, day: day) || EDTF_OPEN_END
-
end
-
-
# Takes year, month, and day integers.
-
# Returns edtf date or nil
-
1
def make_date(year: nil, month: nil, day: nil)
-
14
if year && month && day
-
6
Date.new(year,month,day).day_precision!
-
8
elsif year && month
-
2
Date.new(year,month).month_precision!
-
6
elsif year
-
2
Date.new(year).year_precision!
-
end
-
end
-
end
-
end
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require 'tasks/abstract_task'
-
1
require_relative '../../helpers/hyrax/embargo_helper'
-
-
1
class AboutToExpireEmbargoesService
-
1
include ::Hyrax::EmbargoHelper
-
-
1
def initialize( email_owner: true, expiration_lead_days: nil, skip_file_sets: true, test_mode: true, to_console: false, verbose: false )
-
12
LoggingHelper.bold_debug [ LoggingHelper.here,
-
LoggingHelper.called_from,
-
LoggingHelper.obj_class( 'class', self ),
-
"email_owner=#{email_owner}",
-
"expiration_lead_days=#{expiration_lead_days}",
-
"skip_file_sets=#{skip_file_sets}",
-
"test_mode=#{test_mode}",
-
"to_console=#{to_console}",
-
"verbose=#{verbose}",
-
"" ]
-
12
@email_owner = email_owner
-
12
@expiration_lead_days = expiration_lead_days
-
12
@skip_file_sets = skip_file_sets
-
12
@test_mode = test_mode
-
12
@to_console = to_console
-
12
@verbose = verbose
-
end
-
-
1
def run
-
3
LoggingHelper.bold_debug [ LoggingHelper.here,
-
LoggingHelper.called_from,
-
LoggingHelper.obj_class( 'class', self ),
-
"@email_owner=#{@email_owner}",
-
"@expiration_lead_days=#{@expiration_lead_days}",
-
"@skip_file_sets=#{@skip_file_sets}",
-
"@test_mode=#{@test_mode}",
-
"@to_console=#{@to_console}",
-
"@verbose=#{@verbose}",
-
"" ]
-
3
@now = DateTime.now
-
3
@assets = Array( assets_under_embargo )
-
3
if @expiration_lead_days.blank?
-
1
about_to_expire_embargoes_for_lead_days( lead_days: 7 )
-
1
about_to_expire_embargoes_for_lead_days( lead_days: 1 )
-
else
-
2
@expiration_lead_days = @expiration_lead_days.to_i
-
2
if 0 < @expiration_lead_days
-
1
about_to_expire_embargoes_for_lead_days( lead_days: @expiration_lead_days )
-
else
-
1
about_to_expire_embargoes_for_lead_days( lead_days: 7 )
-
1
about_to_expire_embargoes_for_lead_days( lead_days: 1 )
-
end
-
end
-
end
-
-
1
def about_to_expire_embargoes_for_lead_days( lead_days: )
-
5
run_msg "about_to_expire_embargoes_for_lead_days: lead_days=#{lead_days}"
-
# puts "expiration lead days: #{lead_days}" if @test_mode
-
5
lead_date = @now.beginning_of_day + lead_days.days
-
5
lead_date = lead_date.strftime "%Y%m%d"
-
5
run_msg "lead_date=#{lead_date}"
-
5
@assets.each_with_index do |asset,i|
-
5
next if @skip_file_sets && "FileSet" == asset.model_name
-
4
embargo_release_date = asset_embargo_release_date( asset: asset )
-
4
embargo_release_date = embargo_release_date.beginning_of_day.strftime "%Y%m%d"
-
4
run_msg "#{asset.id} embargo_release_date=#{embargo_release_date}"
-
4
if embargo_release_date == lead_date
-
2
run_msg "about to call about_to_expire_embargo_email for asset #{asset.id}" if @test_mode
-
about_to_expire_embargo_email( asset: asset,
-
expiration_days: lead_days,
-
email_owner: @email_owner,
-
test_mode: @test_mode,
-
2
verbose: @verbose ) unless @test_mode
-
end
-
end
-
end
-
-
1
def run_msg( msg )
-
2
LoggingHelper.debug msg
-
2
puts msg if @to_console
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require 'tasks/abstract_task'
-
1
require_relative '../../helpers/hyrax/embargo_helper'
-
-
1
class DeactivateExpiredEmbargoesService
-
1
include ::Hyrax::EmbargoHelper
-
-
1
def initialize( email_owner: true, skip_file_sets: true, test_mode: true, to_console: false, verbose: false )
-
6
LoggingHelper.bold_debug [ LoggingHelper.here,
-
LoggingHelper.called_from,
-
LoggingHelper.obj_class( 'class', self ),
-
"email_owner=#{email_owner}",
-
"skip_file_sets=#{skip_file_sets}",
-
"test_mode=#{test_mode}",
-
"to_console=#{to_console}",
-
"verbose=#{verbose}",
-
"" ]
-
6
@email_owner = email_owner
-
6
@skip_file_sets = skip_file_sets
-
6
@test_mode = test_mode
-
6
@to_console = to_console
-
6
@verbose = verbose
-
end
-
-
1
def run
-
3
LoggingHelper.bold_debug [ LoggingHelper.here,
-
LoggingHelper.called_from,
-
LoggingHelper.obj_class( 'class', self ),
-
"@email_owner=#{@email_owner}",
-
"@skip_file_sets=#{@skip_file_sets}",
-
"@test_mode=#{@test_mode}",
-
"" ]
-
3
@now = DateTime.now
-
3
@assets = Array( assets_with_expired_embargoes )
-
3
run_msg "The number of assets with expired embargoes is: #{@assets.size}" if @verbose
-
# puts
-
3
@assets.each_with_index do |asset,i|
-
3
next if @skip_file_sets && "FileSet" == asset.model_name
-
2
run_msg "#{i} - #{asset.id}, #{asset.model_name}, #{asset.human_readable_type}, #{asset.solr_document.title} #{asset.embargo_release_date}, #{asset.visibility_after_embargo}" if @verbose
-
2
model = ::ActiveFedora::Base.find asset.id
-
2
deactivate_embargo( curation_concern: model,
-
copy_visibility_to_files: true,
-
current_user: Deepblue::ProvenanceHelper.system_as_current_user,
-
email_owner: @email_owner,
-
test_mode: @test_mode,
-
verbose: @verbose )
-
end
-
end
-
-
1
def run_msg( msg )
-
2
LoggingHelper.debug msg
-
2
puts msg if @to_console
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
class DoiMintingService
-
-
1
PUBLISHER = "Indiana University".freeze
-
1
RESOURCE_TYPE = "Dataset".freeze
-
-
1
attr :current_user, :work, :metadata
-
-
1
def self.mint_doi_for( work:, current_user: )
-
Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
Deepblue::LoggingHelper.called_from,
-
"work.id=#{work.id}" ]
-
service = Deepblue::DoiMintingService.new( work: work, current_user: current_user )
-
Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
Deepblue::LoggingHelper.called_from,
-
"work.id=#{work.id}",
-
"about to call service.run" ]
-
service.run
-
rescue Exception => e # rubocop:disable Lint/RescueException
-
Rails.logger.debug "DoiMintingService.mint_doi_for( work id = #{work.id}, current_user = #{current_user} ) rescue exception -- Exception: #{e.class}: #{e.message} at #{e.backtrace[0]}"
-
unless work.nil?
-
work.reload # consider locking work
-
work.doi = nil
-
work.save
-
work.reload
-
work.doi
-
end
-
raise
-
end
-
-
1
def initialize( work:, current_user: )
-
5
Rails.logger.debug "DoiMintingService.initalize( work id = #{work.id} )"
-
5
@work = work
-
5
@current_user = current_user
-
5
@metadata = generate_metadata
-
end
-
-
1
def run
-
4
Rails.logger.debug "DoiMintingService.run( work id = #{work.id} )"
-
4
rv = doi_server_reachable?
-
4
Rails.logger.debug "DoiMintingService.run doi_server_reachable?=#{rv}"
-
4
return mint_doi_failed unless rv
-
3
work.reload # consider locking work
-
3
work.doi = mint_doi
-
3
work.save
-
3
work.reload
-
3
work.provenance_mint_doi( current_user: current_user, event_note: 'DoiMintingService' )
-
3
work.doi
-
end
-
-
1
def self.print_ezid_config
-
config = Ezid::Client.config
-
puts "Ezid::Client.config.host = #{config.host}"
-
puts "Ezid::Client.config.port = #{config.port}"
-
puts "Ezid::Client.config.user = #{config.user}"
-
puts "Ezid::Client.config.password = #{config.password}"
-
puts "Ezid::Client.config.default_shoulder = #{config.default_shoulder}"
-
end
-
-
1
def ezid_config
-
3
config = Ezid::Client.config
-
3
return [ "Ezid::Client.config.host = #{config.host}",
-
"Ezid::Client.config.port = #{config.port}",
-
"Ezid::Client.config.user = #{config.user}",
-
# "Ezid::Client.config.password = #{config.password}",
-
"Ezid::Client.config.default_shoulder = #{config.default_shoulder}" ]
-
end
-
-
1
private
-
-
# Any error raised during connection is considered false
-
1
def doi_server_reachable?
-
Ezid::Client.new.server_status.up? rescue false
-
end
-
-
1
def generate_metadata
-
5
Ezid::Metadata.new.tap do |md|
-
5
md.datacite_title = work.title.first
-
5
md.datacite_publisher = PUBLISHER
-
5
md.datacite_publicationyear = Date.today.year.to_s
-
5
md.datacite_resourcetype= RESOURCE_TYPE
-
5
md.datacite_creator=work.creator.join(';')
-
5
md.target = Rails.application.routes.url_helpers.hyrax_data_set_url(id: work.id)
-
end
-
end
-
-
1
def mint_doi
-
# identifier = Ezid::Identifier.create(@metadata)
-
3
Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
Deepblue::LoggingHelper.called_from,
-
"work.id=#{work.id}",
-
"metadata=#{metadata}" ]
-
-
# Rails.logger.debug "DoiMintingService.mint_doi( #{metadata} )"
-
# msg = ezid_config.join("\n")
-
# Rails.logger.debug msg
-
3
Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
-
Deepblue::LoggingHelper.called_from,
-
"work.id=#{work.id}" ] + ezid_config
-
3
shoulder = Ezid::Client.config.default_shoulder
-
3
identifier = Ezid::Identifier.mint( shoulder, @metadata )
-
3
identifier.id
-
end
-
-
1
def mint_doi_failed
-
1
Rails.logger.error "DoiMintingService.mint_doi_failed work id = #{work.id}"
-
1
work.reload # consider locking work
-
1
work.doi = nil
-
1
work.save
-
1
work.reload
-
1
work.doi
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require 'json'
-
-
# rubocop:disable Metrics/ParameterLists
-
1
class LogExporter < LogReader
-
-
1
DEFAULT_PP_EXPORT = false
-
-
1
attr_accessor :output, :output_mode
-
1
attr_reader :lines_exported
-
1
attr_reader :output_close, :output_pathname
-
1
attr_accessor :pp_export
-
-
1
def initialize( filter: nil, input:, output:, options: {} )
-
9
super( filter: filter, input: input, options: options )
-
9
@output = output
-
9
@pp_export = option( key: 'pp_export', default_value: DEFAULT_PP_EXPORT )
-
9
puts "pp_export=#{pp_export}" if verbose
-
end
-
-
1
def export_line( line, timestamp, event, event_note, class_name, id, raw_key_values )
-
2
if pp_export
-
1
pretty_print_line line, timestamp, event, event_note, class_name, id, raw_key_values
-
else
-
1
@output.puts line
-
end
-
end
-
-
1
def pretty_print_line( line, timestamp, event, event_note, class_name, id, raw_key_values )
-
1
@output.puts "#{timestamp} #{event}/#{event_note}/#{class_name}/#{id}"
-
1
@output.puts JSON.pretty_generate( JSON.parse( raw_key_values ) )
-
end
-
-
1
def output_mode
-
2
@output_mode ||= option( key: 'output_mode', default_value: 'w' )
-
end
-
-
1
def run
-
1
@lines_exported = 0
-
1
log_open_output
-
1
readlines do |line, timestamp, event, event_note, class_name, id, raw_key_values|
-
export_line line, timestamp, event, event_note, class_name, id, raw_key_values
-
@lines_exported += 1
-
end
-
ensure
-
1
log_close_output
-
end
-
-
# rubocop:disable Rails/Output
-
1
def quick_report
-
1
super
-
1
puts "output_pathname: #{@output_pathname}"
-
1
puts "lines_exported: #{@lines_exported}"
-
end
-
# rubocop:enable Rails/Output
-
-
1
protected
-
-
1
def log_close_output
-
return unless @output_close
-
@output.flush unless @output.nil? # rubocop:disable Style/SafeNavigation
-
@output.close unless @output.nil? # rubocop:disable Style/SafeNavigation
-
end
-
-
1
def log_open_output
-
# puts "@output=#{@output}"
-
@output_pathname = Pathname.new @output if @output.is_a? String
-
# puts "@output_pathname=#{@output_pathname}"
-
@output_pathname = @output if @output.is_a? Pathname
-
# puts "@output_pathname=#{@output_pathname}"
-
# return if @output_pathname.blank? # TODO: why doesn't this work
-
# puts "output_mode=#{output_mode}"
-
@output = open( @output_pathname, output_mode )
-
@output_close = true
-
end
-
-
end
-
# rubocop:enable Metrics/ParameterLists
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
class LogExtracter < LogReader
-
-
1
attr_reader :lines_extracted
-
-
1
def initialize( filter: nil, input:, extract_parsed_tuple: false, options: {} )
-
super( filter: filter, input: input, options: options )
-
@extract_parsed_tuple = extract_parsed_tuple
-
@lines_extracted = []
-
end
-
-
1
def extract_line( line, timestamp, event, event_note, class_name, id, raw_key_values )
-
if @extract_parsed_tuple
-
@lines_extracted << [line, timestamp, event, event_note, class_name, id, raw_key_values]
-
else
-
@lines_extracted << line
-
end
-
end
-
-
1
def run
-
readlines do |line, timestamp, event, event_note, class_name, id, raw_key_values|
-
extract_line line, timestamp, event, event_note, class_name, id, raw_key_values
-
end
-
end
-
-
end
-
-
end
-
# frozen_string_literal: true
-
-
# rubocop:disable Metrics/ParameterLists
-
1
module Deepblue
-
-
1
class AbstractFilter
-
-
1
attr_accessor :verbose
-
-
1
def initialize( options: {} )
-
9
@verbose = options_value( options, key: "verbose_filters", default_value: false )
-
# ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
-
# ::Deepblue::LoggingHelper.called_from,
-
# "verbose=#{verbose}",
-
# "" ]
-
# puts "#{::Deepblue::LoggingHelper.here} self.class.name=#{self.class.name}"
-
# puts "#{::Deepblue::LoggingHelper.here} options=#{options}"
-
# puts "#{::Deepblue::LoggingHelper.here} verbose=#{verbose}"
-
end
-
-
1
protected
-
-
1
def options_value( options, key:, default_value: nil, verbose: false )
-
9
return default_value if options.blank?
-
return default_value unless options.key? key
-
puts "set key #{key} to #{options[key]}" if verbose
-
return options[key]
-
end
-
-
end
-
-
1
class AbstractArrayOfFilters < AbstractFilter
-
-
1
attr_reader :filters
-
-
1
def initialize( filters: [], options: {} )
-
super( options: options )
-
@filters = Array( filters )
-
end
-
-
1
def add_filters( new_filters:, append: true )
-
new_filters = Array( new_filters )
-
if append
-
@filters.concat new_filters
-
else
-
until new_filters.empty?
-
filter = new_filters.pop
-
@filters.unshift filter
-
end
-
end
-
return self
-
end
-
-
1
def all_log_filter?
-
false
-
end
-
-
end
-
-
1
class AndLogFilter < AbstractArrayOfFilters
-
-
1
def initialize( filters: [], options: {} )
-
super( filters: filters, options: options )
-
end
-
-
1
def and( new_filters: )
-
add_filters( new_filters: new_filters )
-
end
-
-
1
def or( new_filters: )
-
new_filter = OrLogFilter.new( filters: self ).add_filters( new_filters: new_filters )
-
return new_filter
-
end
-
-
1
def filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
-
filters.each do |filter|
-
return false unless filter.filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
-
end
-
return true
-
end
-
-
end
-
-
1
class OrLogFilter < AbstractArrayOfFilters
-
-
1
def initialize( filters: [], options: {} )
-
super( filters: filters, options: options )
-
end
-
-
1
def and( new_filters: )
-
new_filter = AndLogFilter.new( filters: self ).add_filters( new_filters: new_filters )
-
return new_filter
-
end
-
-
1
def or( new_filters: )
-
add_filters( new_filters: new_filters )
-
end
-
-
1
def filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
-
filters.each do |filter|
-
return true if filter.filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
-
end
-
return false
-
end
-
-
end
-
-
1
# Base class for provenance-log line filters. Subclasses override
# #filter_in to decide whether a parsed log line is accepted.
class AbstractLogFilter < AbstractFilter

  # FIX: was misspelled "initializer", so it never ran as the constructor
  # (construction fell through to the inherited initialize). The corrected
  # spelling delegates to the superclass, preserving prior behavior.
  def initialize( options: {} )
    super( options: options )
  end

  # Overridden (true) by AllLogFilter so composites can recognize and
  # elide the accept-all filter.
  def all_log_filter?
    false
  end

  # Combine with new_filters under AND semantics, returning a new filter.
  def and( new_filters: )
    new_filter = AndLogFilter.new( filters: self ).add_filters( new_filters: new_filters )
    return new_filter
  end

  # Combine with new_filters under OR semantics, returning a new filter.
  def or( new_filters: )
    new_filter = OrLogFilter.new( filters: self ).add_filters( new_filters: new_filters )
    return new_filter
  end

  # Default: reject every line (subclasses override).
  def filter_in( _timestamp, _event, _event_note, _class_name, _id, _raw_key_values )
    puts "#{::Deepblue::LoggingHelper.here} filter_in returning false"
    false
  end

  protected

  # Coerce a space-separated String, scalar, or nil into an Array.
  def arg_to_array( arg )
    arr = if arg.is_a? String
            arg.split( ' ' )
          else
            Array( arg )
          end
    return arr
  end

  # Parse arg into a DateTime. With no explicit timestamp_format, try a
  # few common layouts, then fall back to DateTime.parse. Returns nil
  # (after logging) on a parse failure.
  def arg_to_timestamp( arg, timestamp_format: )
    timestamp = arg
    if timestamp_format.blank?
      # FIX: non-String args (nil, Date, DateTime — all documented usages)
      # previously hit String#match? and raised NoMethodError; pass them
      # through unchanged instead.
      return arg unless arg.is_a? String
      return DateTime.strptime( arg, "%Y-%m-%d %H:%M:%S" ) if arg.match?( /\d\d\d\d\-\d\d?\-\d\d? \d\d?:\d\d:\d\d/ )
      return DateTime.strptime( arg, "%m/%d/%Y" ) if arg.match?( /\d\d?\/\d\d?\/\d\d\d\d/ )
      return DateTime.strptime( arg, "%m-%d-%Y" ) if arg.match?( /\d\d?\-\d\d?\-\d\d\d\d/ )
      return DateTime.strptime( arg, "%Y" ) if arg.match?( /\d\d\d\d/ )
      timestamp = DateTime.parse( arg )
    elsif arg.is_a? String
      timestamp = DateTime.strptime( arg, timestamp_format )
    end
    return timestamp
  rescue ArgumentError
    puts "DateTime.parse failed - arg='#{arg}' timestamp_format='#{timestamp_format}'" # - #{e.class}: #{e.message} at #{e.backtrace[0]}"
  end

  def parse_key_values( raw_key_values )
    ProvenanceHelper.parse_log_line_key_values raw_key_values
  end

end
-
-
1
# Logical negation of a wrapped filter: accepts exactly the lines the
# wrapped filter rejects.
class NotLogFilter < AbstractLogFilter

  attr_reader :filter

  def initialize( filter:, options: {} )
    super( options: options )
    @filter = filter
  end

  def filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
    !@filter.filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
  end

end
-
-
1
# Accept-all filter, used as the default when no filter is supplied.
class AllLogFilter < AbstractLogFilter

  def initialize( options: {} )
    super( options: options )
  end

  # Lets composites recognize and elide this filter.
  def all_log_filter?
    true
  end

  # Every line passes.
  def filter_in( _timestamp, _event, _event_note, _class_name, _id, _raw_key_values )
    true
  end

end
-
-
1
# Accepts lines whose class_name is in the configured list.
class ClassNameLogFilter < AbstractLogFilter

  attr_reader :matching_class_names

  def initialize( matching_class_names: [], options: {} )
    super( options: options )
    # FIX: was assigned to the misspelled @matching_classe_names, which
    # left the declared attr_reader :matching_class_names permanently nil.
    @matching_class_names = arg_to_array matching_class_names
  end

  def filter_in( _timestamp, _event, _event_note, class_name, _id, _raw_key_values )
    puts "#{@matching_class_names} include? #{class_name}" if verbose
    @matching_class_names.include? class_name
  end

end
-
-
1
# Accepts only lines logged for Collection objects.
class CollectionLogFilter < ClassNameLogFilter

  def initialize( options: {} )
    super( matching_class_names: Array( Collection.name ), options: options )
  end

end
-
-
1
# Accepts only lines logged for DataSet objects.
class DataSetLogFilter < ClassNameLogFilter

  def initialize( options: {} )
    super( matching_class_names: Array( DataSet.name ), options: options )
  end

end
-
-
1
# Accepts lines whose timestamp falls within [begin_timestamp, end_timestamp].
# Either bound may be blank/nil, leaving that side of the range open.
#
# Examples:
#
#   filter = Deepblue::DateLogFilter.new( begin_timestamp: Date.new( 2018, 8, 17 ) )
#   filter = Deepblue::DateLogFilter.new( begin_timestamp: DateTime.new( 2018, 8, 17, 10, 0, 0 ) )
#   filter = Deepblue::DateLogFilter.new( begin_timestamp: DateTime.now - 3.days )
#
#   filter = Deepblue::DateLogFilter.new( begin_timestamp: '2018/08/17', timestamp_format: '%Y/%m/%d' )
#   filter = Deepblue::DateLogFilter.new( begin_timestamp: '2018/08/17 12:10:00', timestamp_format: '%Y/%m/%d %H:%M:%S' )
#
#   filter = Deepblue::DateLogFilter.new( begin_timestamp: "2018-08-16 15:00:00", timestamp_format: '%Y-%m-%d %H:%M:%S' )
#
#   filter = Deepblue::DateLogFilter.new( begin_timestamp: Date.new - 2.days )
#
class DateLogFilter < AbstractLogFilter

  attr_reader :begin_timestamp, :end_timestamp

  def initialize( begin_timestamp: nil, end_timestamp: nil, timestamp_format: '', options: {} )
    super( options: options )
    @begin_timestamp = arg_to_timestamp( begin_timestamp, timestamp_format: timestamp_format )
    @end_timestamp = arg_to_timestamp( end_timestamp, timestamp_format: timestamp_format )
  end

  def filter_in( timestamp, _event, _event_note, _class_name, _id, _raw_key_values )
    before_begin = false
    before_begin = timestamp < @begin_timestamp if @begin_timestamp.present?
    after_end = false
    # FIX: previously tested @after_timestamp, an instance variable that is
    # never assigned, so the end-of-range bound was silently ignored.
    after_end = timestamp > @end_timestamp if @end_timestamp.present?
    puts "#{::Deepblue::LoggingHelper.here} filter_in returning..."
    puts "#{timestamp} is before_begin? #{before_begin} and #{timestamp} is after_end? #{after_end}"
    return !before_begin && !after_end
  end

  # Compact label for the begin bound, or '' when unset.
  def begin_timestamp_label
    ts = begin_timestamp
    return '' if ts.blank?
    ts.strftime "%Y%m%d%H%M%S"
  end

  # Compact label for the end bound, or '' when unset.
  def end_timestamp_label
    ts = end_timestamp
    return '' if ts.blank?
    ts.strftime "%Y%m%d%H%M%S"
  end

  # "begin-end" label; degrades to a single label or '' when bounds are unset.
  def date_range_label
    ts1 = begin_timestamp_label
    ts2 = end_timestamp_label
    return '' if ts1.blank? && ts2.blank?
    return ts1 if ts2.blank?
    return ts2 if ts1.blank?
    return "#{ts1}-#{ts2}"
  end

end
-
-
1
# Accepts lines whose event is one of the configured matching_events.
class EventLogFilter < AbstractLogFilter

  attr_reader :matching_events

  def initialize( matching_events: [], options: {} )
    super( options: options )
    @matching_events = arg_to_array matching_events
  end

  def filter_in( _timestamp, event, _event_note, _class_name, _id, _raw_key_values )
    puts "#{@matching_events} include? #{event}" if verbose
    @matching_events.include?( event )
  end

end
-
-
1
# Accepts only create and destroy events.
class CreateOrDestroyLogFilter < EventLogFilter

  def initialize( options: {} )
    super( matching_events: [ AbstractEventBehavior::EVENT_CREATE, AbstractEventBehavior::EVENT_DESTROY ],
           options: options )
  end

end
-
-
1
# Accepts only lines logged for FileSet objects.
# NOTE(review): named FileSetFilter rather than FileSetLogFilter,
# inconsistent with the sibling classes; renaming would break callers.
class FileSetFilter < ClassNameLogFilter

  def initialize( options: {} )
    super( matching_class_names: [ FileSet.name ], options: options )
  end

end
-
-
1
# Accepts only fixity-check events.
class FixityCheckLogFilter < EventLogFilter

  def initialize( options: {} )
    super( matching_events: [ AbstractEventBehavior::EVENT_FIXITY_CHECK ], options: options )
  end

end
-
-
1
# Accepts the set of events relevant to a migration run: child adds,
# fixity checks, ingests, migrations, and virus scans.
class MigrationEventFilter < EventLogFilter

  def initialize( options: {} )
    super( matching_events: [ AbstractEventBehavior::EVENT_CHILD_ADD,
                              AbstractEventBehavior::EVENT_FIXITY_CHECK,
                              AbstractEventBehavior::EVENT_INGEST,
                              AbstractEventBehavior::EVENT_MIGRATE,
                              AbstractEventBehavior::EVENT_VIRUS_SCAN ],
           options: options )
  end

end
-
-
1
# Accepts lines whose id — or, via the helper predicates, whose parsed
# 'parent_id'/'child_id' key — is in matching_ids.
class IdLogFilter < AbstractLogFilter

  attr_reader :matching_ids

  def initialize( matching_ids: [], options: {} )
    super( options: options )
    @matching_ids = arg_to_array matching_ids
  end

  # Direct id match.
  def filter_in( _timestamp, _event, _event_note, _class_name, id, _raw_key_values )
    @matching_ids.include?( id )
  end

  # Match the 'child_id' key parsed from raw_key_values (false when absent).
  def filter_in_child_id( raw_key_values )
    child = parse_key_values( raw_key_values )['child_id']
    return false if child.blank?
    @matching_ids.include?( child )
  end

  # Match the 'parent_id' key parsed from raw_key_values (false when absent).
  def filter_in_parent_id( raw_key_values )
    parent = parse_key_values( raw_key_values )['parent_id']
    return false if parent.blank?
    @matching_ids.include?( parent )
  end

  # Match either the 'parent_id' or the 'child_id' key (parses once).
  def filter_in_parent_or_child_id( raw_key_values )
    key_values = parse_key_values raw_key_values
    parent = key_values['parent_id']
    return true if parent.present? && @matching_ids.include?( parent )
    child = key_values['child_id']
    return false if child.blank?
    @matching_ids.include?( child )
  end

end
-
-
1
# Accepts lines whose parsed 'child_id' key matches one of matching_ids.
class ChildIdLogFilter < IdLogFilter

  def initialize( matching_ids: [], options: {} )
    super( matching_ids: matching_ids, options: options )
  end

  def filter_in( _timestamp, _event, _event_note, _class_name, _id, raw_key_values )
    filter_in_child_id( raw_key_values )
  end

end
-
-
1
# Accepts lines whose id matches directly, or whose parsed 'parent_id'
# key matches.
class IdOrParentIdLogFilter < IdLogFilter

  def initialize( matching_ids: [], options: {} )
    super( matching_ids: matching_ids, options: options )
  end

  def filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
    # FIX: was `super.filter_in( ... )`, which invoked the superclass
    # method (zsuper) and then called #filter_in on its Boolean result,
    # raising NoMethodError. Plain `super` with explicit args is intended.
    return true if super( timestamp, event, event_note, class_name, id, raw_key_values )
    filter_in_parent_id raw_key_values
  end

end
-
-
1
# Accepts lines whose id matches directly, or whose parsed 'parent_id'
# or 'child_id' key matches.
class IdOrParentOrChildIdLogFilter < IdLogFilter

  def initialize( matching_ids: [], options: {} )
    super( matching_ids: matching_ids, options: options )
  end

  def filter_in( timestamp, event, event_note, class_name, id, raw_key_values )
    # FIX: was `super.filter_in( ... )` — zsuper returns a Boolean, and
    # calling #filter_in on it raises NoMethodError.
    return true if super( timestamp, event, event_note, class_name, id, raw_key_values )
    filter_in_parent_or_child_id raw_key_values
  end

end
-
-
1
# Accepts lines whose parsed 'parent_id' key matches one of matching_ids.
class ParentIdLogFilter < IdLogFilter

  def initialize( matching_ids: [], options: {} )
    super( matching_ids: matching_ids, options: options )
  end

  def filter_in( _timestamp, _event, _event_note, _class_name, _id, raw_key_values )
    filter_in_parent_id( raw_key_values )
  end

end
-
-
end
-
# rubocop:enable Metrics/ParameterLists
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require_relative './log_filter'
-
-
1
# Reads a provenance log from +input+ (an IO, a String path, or a
# Pathname), parses each line, applies a composite filter, and yields
# accepted lines to the caller's block.
#
# +filter+ may be nil/blank (accept everything), a single filter object,
# or an Array of filters (AND-combined). Recognized +options+ keys
# (String or Symbol): 'verbose', 'verbose_filter', 'begin'/'begin_timestamp',
# 'end'/'end_timestamp', 'format'/'timestamp_format', 'input_mode'.
class LogReader

  DEFAULT_BEGIN_TIMESTAMP = ''
  DEFAULT_END_TIMESTAMP = ''
  DEFAULT_TIMESTAMP_FORMAT = ''
  DEFAULT_VERBOSE = false
  DEFAULT_VERBOSE_FILTER = false

  attr_accessor :verbose, :verbose_filter

  attr_reader :current_line
  attr_reader :date_range_filter
  attr_reader :filter # , :filter_predicate
  # NOTE(review): :input_mode is also defined as a method below, which
  # shadows this attr_reader with a lazily-defaulted version.
  attr_reader :input, :input_pathname, :input_mode, :input_close
  attr_reader :lines_parsed, :lines_read
  attr_reader :options
  attr_reader :parsed,
              :parsed_timestamp,
              :parsed_event,
              :parsed_event_note,
              :parsed_class_name,
              :parsed_id,
              :parsed_raw_key_values

  # filter_predicate: ->( _timestamp, _event, _event_note, _class_name, _id, _key_values ) { true },
  def initialize( filter: nil,
                  input:,
                  options: {} )

    @filter = initialize_filter filter
    # @filter_predicate = filter_predicate
    @input = input
    @options = options
    @verbose = option( key: 'verbose', default_value: DEFAULT_VERBOSE )
    @verbose_filter = option( key: 'verbose_filter', default_value: DEFAULT_VERBOSE_FILTER )
    # folds any begin/end options into @filter as a DateLogFilter
    add_date_range_filter
  end

  # Normalize the filter argument: blank -> accept-all, Array -> AND composite.
  def initialize_filter( filter, options: {} )
    return AllLogFilter.new if filter.blank?
    return AndLogFilter.new( filters: filter, options: options ) if filter.is_a? Array
    filter
  end

  # If begin/end timestamps were supplied in options (short or long key
  # spellings), AND a DateLogFilter into the current filter.
  def add_date_range_filter( options: {} )
    begin_timestamp = option( key: 'begin' )
    begin_timestamp = option( key: 'begin_timestamp', default_value: DEFAULT_BEGIN_TIMESTAMP ) unless begin_timestamp.present?
    end_timestamp = option( key: 'end' )
    end_timestamp = option( key: 'end_timestamp', default_value: DEFAULT_END_TIMESTAMP ) unless end_timestamp.present?
    timestamp_format = option( key: 'format' )
    timestamp_format = option( key: 'timestamp_format', default_value: DEFAULT_TIMESTAMP_FORMAT ) unless timestamp_format.present?
    puts "add_date_range_filter begin_timestamp=#{begin_timestamp} end_timestamp=#{end_timestamp}" if verbose_filter # rubocop:disable Rails/Output
    return if begin_timestamp.blank? && end_timestamp.blank?
    @date_range_filter = DateLogFilter.new( begin_timestamp: begin_timestamp,
                                            end_timestamp: end_timestamp,
                                            timestamp_format: timestamp_format,
                                            options: options )
    filter_and( new_filters: date_range_filter )
  end

  # AND new_filters into the current filter; append controls ordering.
  def filter_and( new_filters:, append: true, options: {} )
    return if new_filters.blank?
    current_filter = @filter
    @filter = filter_refresh current_filter: current_filter, new_filters: new_filters, append: append, options: options
    puts "filter_and @filter=#{@filter}" if verbose # rubocop:disable Rails/Output
  end

  # Build the replacement composite filter for filter_and.
  def filter_refresh( current_filter:, new_filters:, append: true, options: {} )
    if current_filter.all_log_filter?
      # accept-all contributes nothing to an AND; take just the new filters
      if new_filters.is_a? Array
        AndLogFilter.new( filters: new_filters, options: options )
      else
        new_filters
      end
    elsif append
      current_filter.and( new_filters: new_filters )
    else
      new_filters = Array( new_filters )
      # NOTE(review): Array#concat expects an Array, but current_filter is
      # a filter object here — verify this (prepend) path is exercised.
      new_filters.concat current_filter
      AndLogFilter.new( filters: new_filters, options: options )
    end
  end

  # OR new_filters into the current filter; when the current filter is the
  # accept-all filter, the OR can never narrow it, so new_filters are ignored.
  def filter_or( new_filters:, append: true, options: {} )
    return if new_filters.blank?
    current_filter = @filter
    @filter = if append && current_filter.all_log_filter?
                current_filter # new_filters are unreachable, so ignore
              elsif append
                current_filter.or( new_filters: new_filters )
              else
                new_filters = Array( new_filters )
                # NOTE(review): same Array#concat concern as filter_refresh.
                new_filters.concat current_filter
                OrLogFilter.new( filters: new_filters, options: options )
              end
  end

  # Lazily-defaulted file open mode (options key 'input_mode', default 'r').
  def input_mode
    @input_mode ||= option( key: 'input_mode', default_value: 'r' )
  end

  # Parse @current_line into the parsed_* attributes.
  # A line is of the form: "timestamp event/event_note/class_name/id key_values"
  # Sets @parsed true on success; on LogParseError, prints the message and
  # leaves @parsed false so the caller skips the line.
  def parse_line
    @parsed_timestamp = nil
    @parsed_event = nil
    @parsed_event_note = nil
    @parsed_class_name = nil
    @parsed_id = nil
    @parsed_raw_key_values = nil
    @parsed = false
    return if @current_line.blank?
    @parsed_timestamp,
      @parsed_event,
      @parsed_event_note,
      @parsed_class_name,
      @parsed_id,
      @parsed_raw_key_values = ProvenanceHelper.parse_log_line( @current_line,
                                                                line_number: @lines_read,
                                                                raw_key_values: true )
    @lines_parsed += 1
    @parsed = true
  rescue LogParseError => e
    puts e.message # rubocop:disable Rails/Output
  end

  # Print read/parse counters to stdout.
  # rubocop:disable Rails/Output
  def quick_report
    puts
    puts "Quick report"
    puts "input_pathname: #{input_pathname}"
    puts "lines_read: #{lines_read}"
    puts "lines_parsed: #{lines_parsed}"
  end
  # rubocop:enable Rails/Output

  # Read every line of input, yielding each parsed line that passes the
  # filter to the given block as
  # (line, timestamp, event, event_note, class_name, id, raw_key_values).
  # Input is always closed (if this reader opened it) via ensure.
  def readlines( &for_filtered_line_block )
    @lines_parsed = 0
    @lines_read = 0
    log_open_input
    # for each line of input
    @current_line = nil
    line_filter = filter
    until @input.eof?
      @current_line = @input.readline
      # NOTE(review): chop! removes the final character even when it is not
      # a newline terminator; chomp! would be safer if a log file can end
      # without a trailing newline.
      @current_line.chop!
      @lines_read += 1
      parse_line
      next unless @parsed
      # next @filter_predicate.call( @parsed_timestamp,
      next unless line_filter.filter_in( @parsed_timestamp,
                                         @parsed_event,
                                         @parsed_event_note,
                                         @parsed_class_name,
                                         @parsed_id,
                                         @parsed_raw_key_values )
      next unless for_filtered_line_block
      yield( @current_line,
             @parsed_timestamp,
             @parsed_event,
             @parsed_event_note,
             @parsed_class_name,
             @parsed_id,
             @parsed_raw_key_values )
    end
  ensure
    log_close_input
  end

  protected

  # Close input only if this reader opened it in log_open_input.
  def log_close_input
    return unless @input_close
    @input.close unless @input.nil? # rubocop:disable Style/SafeNavigation
  end

  # If @input is a path (String or Pathname), open it for reading and
  # remember to close it; an IO passed by the caller is left untouched.
  def log_open_input
    @input_pathname = Pathname.new @input if @input.is_a? String
    @input_pathname = @input if @input.is_a? Pathname
    return unless @input_pathname.exist?
    # NOTE(review): Kernel#open — File.open would avoid the '|command' footgun.
    @input = open( @input_pathname, 'r' )
    @input_close = true
  end

  # Look up an option by key, accepting either String or Symbol spelling.
  def option( key:, default_value: nil )
    return default_value unless options_key? key
    return @options[key] if @options.key? key
    return @options[key.to_sym] if key.is_a? String
    return @options[key.to_s] if key.is_a? Symbol
    return default_value
  end

  # True when the options hash has the key in either spelling.
  def options_key?( key )
    return true if @options.key? key
    return @options.key? key.to_sym if key.is_a? String
    return @options.key? key.to_s if key.is_a? Symbol
    return false
  end

end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require_relative './log_reader'
-
-
# rubocop:disable Metrics/ParameterLists
-
1
# A LogReader that accumulates summary statistics (timestamps, distinct
# ids, per-event and per-class-event tallies) over the filtered lines.
class LogReporter < LogReader

  attr_reader :lines_reported
  attr_reader :output, :output_close, :output_mode, :output_pathname

  attr_reader :timestamp_first, :timestamp_last
  attr_reader :class_events
  attr_reader :events
  attr_reader :ids

  def initialize( filter: nil, input:, options: {} )
    super( filter: filter, input: input, options: options )
    # output is declared but not yet used; kept for future report-to-file support
    @output_close = false
    @output_mode = 'w'
    @output_pathname = nil
  end

  # Run the read pass and print the accumulated summary to stdout.
  # rubocop:disable Rails/Output
  def report
    run
    # TODO: pretty output
    puts "timestamp_first = #{@timestamp_first}"
    puts "timestamp_last = #{@timestamp_last}"
    puts "ids = #{ids}"
    puts "events = #{events}"
    puts "class_events = #{class_events}"
  end
  # rubocop:enable Rails/Output

  # Reset counters and fold every filtered line into them.
  def run
    initialize_report_values
    readlines do |line, timestamp, event, event_note, class_name, id, raw_key_values|
      line_read( line, timestamp, event, event_note, class_name, id, raw_key_values )
    end
  end

  protected

  # Composite key for the per-class, per-event tally.
  def class_event_key( class_name:, event: )
    "#{class_name}_#{event}"
  end

  def initialize_report_values
    @lines_reported = 0
    @timestamp_first = nil
    @timestamp_last = nil
    # auto-vivifying zero counters
    @events = Hash.new { |h, k| h[k] = 0 }
    @class_events = Hash.new { |h, k| h[k] = 0 }
    @ids = {}
  end

  # Update counters for one accepted line. First/last timestamp tracking
  # assumes lines arrive in log (chronological) order.
  def line_read( _line, timestamp, event, _event_note, class_name, id, _raw_key_values )
    @lines_reported += 1
    @timestamp_first = timestamp if @timestamp_first.blank?
    @timestamp_last = timestamp
    @ids[id] = true unless @ids.key? id
    @events[event] = @events[event] + 1
    class_event_key = class_event_key( class_name: class_name, event: event )
    @class_events[class_event_key] = @class_events[class_event_key] + 1
  end

end
-
# rubocop:enable Metrics/ParameterLists
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
# Debug-level event logging callbacks. Each public method writes its
# payload line(s) bracketed by two banner lines on each side; the
# repeated banner pattern is factored into a private helper (output is
# byte-identical to the previous inline version).
class LoggingCallback

  def self.process_event( event_name:, msg: )
    bracketed_debug( event_name ) do
      Rails.logger.debug "#{event_name} >>>>> #{msg}"
    end
  end

  def self.process_event_curation_concern( event_name:, curation_concern:, user: )
    bracketed_debug( event_name ) do
      Rails.logger.debug "#{event_name} >>>>> #{user} >>>>> #{curation_concern}"
    end
  end

  def self.process_event_file_set( event_name:, file_set:, user: )
    bracketed_debug( event_name ) do
      Rails.logger.debug "#{event_name} >>>>> #{user} >>>>> #{file_set}"
    end
  end

  def self.process_event_user( event_name:, user:, msg: '' )
    bracketed_debug( event_name ) do
      Rails.logger.debug "#{event_name} >>>>> #{user}" if msg.blank?
      Rails.logger.debug "#{event_name} >>>>> #{user} >>>>> #{msg}" if msg.present?
    end
  end

  # Emit two banner lines, yield for the payload line(s), then two more.
  def self.bracketed_debug( event_name )
    Rails.logger.debug ">>>>> #{event_name} >>>>>"
    Rails.logger.debug ">>>>> #{event_name} >>>>>"
    yield
    Rails.logger.debug ">>>>> #{event_name} >>>>>"
    Rails.logger.debug ">>>>> #{event_name} >>>>>"
  end
  private_class_method :bracketed_debug

end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
# Subscribes to a named ActiveSupport::Notifications event and logs each
# occurrence at debug level, bracketed by banner lines.
class LoggingService

  attr_reader :event_name

  def initialize( event_name: )
    @event_name = event_name
    # the subscription lives for the life of the process
    ActiveSupport::Notifications.subscribe @event_name do |*args|
      process_event( *args )
    end
  end

  # Log the notification payload (the trailing options hash of args).
  def process_event( *args )
    Rails.logger.debug ">>>>> #{@event_name} >>>>>"
    Rails.logger.debug ">>>>> #{@event_name} >>>>>"
    Rails.logger.debug "#{@event_name} >>>>> #{args.extract_options!}"
    Rails.logger.debug ">>>>> #{@event_name} >>>>>"
    Rails.logger.debug ">>>>> #{@event_name} >>>>>"
  end

end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
require 'json'
-
-
1
require_relative './log_extracter'
-
1
require_relative './log_filter'
-
-
1
# Service for reading, filtering, caching, and HTML-formatting
# provenance log entries for a given object id.
class ProvenanceLogService

  def self.provenance_log_name
    DeepBlueDocs::Application.config.provenance_log_name
  end

  def self.provenance_log_path
    DeepBlueDocs::Application.config.provenance_log_path
  end

  # Return the provenance log entries for id, using the per-id cache file
  # unless refresh is true (in which case the cache is rebuilt from the log).
  def self.entries( id, refresh: false )
    Deepblue::LoggingHelper.bold_debug "ProvenanceLogService.entries( #{id}, #{refresh} )"
    file_path = Deepblue::ProvenancePath.path_for_reference( id )
    if !refresh && File.exist?( file_path )
      rv = read_entries( file_path )
    else
      rv = filter_entries( id )
      write_entries( file_path, rv )
    end
    Deepblue::LoggingHelper.bold_debug "ProvenanceLogService.entries( #{id} ) read #{rv.size} entries"
    return rv
  end

  # Extract the raw log lines matching id from this environment's
  # provenance log file.
  def self.filter_entries( id )
    input = Rails.root.join( 'log', "provenance_#{Rails.env}.log" )
    filter = Deepblue::IdLogFilter.new( matching_ids: Array( id ) )
    extractor = Deepblue::LogExtracter.new( filter: filter, input: input )
    extractor.run
    rv = extractor.lines_extracted
    return rv
  end

  # Parse one entry of the form
  # "timestamp event/event_note/class_name/id key_values" into a Hash;
  # on a parse error, return the raw entry with the error attached.
  # (Dead nil pre-initializations removed — the rescue path never read them.)
  def self.parse_entry( entry, line_number: 0 )
    timestamp, event, event_note, class_name, id,
      raw_key_values = ProvenanceHelper.parse_log_line( entry, line_number: line_number, raw_key_values: true )
    return { timestamp: timestamp, event: event, event_note: event_note, class_name: class_name, id: id,
             raw_key_values: raw_key_values, line_number: line_number, parse_error: nil }
  rescue LogParseError => e
    return { entry: entry, line_number: line_number, parse_error: e }
  end

  # Pretty-print a raw JSON key_values string.
  def self.pp_key_values( raw_key_values )
    return JSON.pretty_generate( JSON.parse( raw_key_values ) )
  end

  # Render key_values (Array, Hash, or scalar; optionally a JSON string
  # when parse is true) as nested HTML tables with escaped cell content.
  def self.key_values_to_table( key_values, parse: false )
    key_values = JSON.parse( key_values ) if parse
    if key_values.is_a? Array
      case key_values.size
      when 0 then return "<table>\n<tr><td> </td></tr>\n</table>\n"
      when 1 then return "<table>\n<tr><td>#{ERB::Util.html_escape( key_values[0] )}</td></tr>\n</table>\n"
      else
        arr = key_values.map { |x| key_values_to_table( x ) }
        return "<table>\n<tr><td>#{arr.join("</td></tr>\n<tr><td>")}</td></tr>\n</table>\n"
      end
    elsif key_values.is_a? Hash
      rv = "<table>\n"
      key_values.each_pair do |key,value|
        rv += "<tr><td>#{ERB::Util.html_escape( key )}</td><td>#{key_values_to_table( value )}</td></tr>\n"
      end
      rv += "</table>\n"
      return rv
    else
      return ERB::Util.html_escape( key_values )
    end
  end

  # Read cached entries, one per line.
  # FIX: File.open instead of Kernel#open (Kernel#open treats a leading
  # '|' as a shell command) and chomp instead of chop (chop would eat a
  # real character on a final line lacking a newline).
  def self.read_entries( file_path )
    entries = []
    File.open( file_path, "r" ) do |fin|
      entries << fin.readline.chomp until fin.eof?
    end
    return entries
  rescue EOFError
    return entries
  end

  # Write entries to the cache file, creating parent directories as needed.
  def self.write_entries( file_path, entries )
    # file_path = Pathname.new file_path unless file_path.is_a? Pathname
    dir = File.dirname file_path
    FileUtils.mkpath dir unless Dir.exist? dir
    File.open( file_path, "w" ) do |out|
      entries.each { |line| out.puts line }
    end
  end

end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
# Computes the file-system path where an object's provenance cache file
# is stored, using a Fedora-style pair-tree layout of the object id.
class ProvenancePath

  attr_reader :id, :destination_name

  class << self

    # Path on file system where derivative file is stored
    # @param [ActiveFedora::Base or String] object either the AF object or its id
    def path_for_reference( object )
      new( object, "provenance" ).provenance_path
    end

    # # @param [ActiveFedora::Base or String] object either the AF object or its id
    # # @return [Array<String>] Array of paths to derivatives for this object.
    # def derivatives_for_reference(object)
    #   new(object).all_paths
    # end

  end

  # @param [ActiveFedora::Base, String] object either the AF object or its id
  def initialize( object, destination_name = nil )
    @id = object.is_a?( String ) ? object : object.id
    @destination_name = destination_name
  end

  def provenance_path
    "#{path_prefix}-#{file_name}" # TODO
  end

  # def all_paths
  #   Dir.glob(root_path.join("*")).select do |path|
  #     path.start_with?(path_prefix.to_s)
  #   end
  # end

  private

  # @return [String] Returns the root path where derivatives will be generated into.
  def root_path
    Pathname.new( provenance_path ).dirname
  end

  # @return <Pathname> Full prefix of the path for object.
  def path_prefix
    Pathname.new( Hyrax.config.derivatives_path ).join( pair_path ) # TODO
  end

  # Pair-tree form of the id: "abcde" -> "ab/cd/e".
  def pair_path
    id.chars.each_slice( 2 ).map( &:join ).join( '/' )
  end

  # "<destination_name>.log", or nil when no destination name was given.
  def file_name
    return unless destination_name
    destination_name + extension
  end

  def extension
    ".log"
  end

end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
# Mixin of virus-scan status constants and predicates shared by scanning
# behaviors.
module VirusScanService

  VIRUS_SCAN_ERROR = 'scan error'
  VIRUS_SCAN_NOT_VIRUS = 'not virus'
  VIRUS_SCAN_SKIPPED = 'scan skipped'
  VIRUS_SCAN_SKIPPED_SERVICE_UNAVAILABLE = 'scan skipped service unavailable'
  VIRUS_SCAN_SKIPPED_TOO_BIG = 'scan skipped too big'
  VIRUS_SCAN_UNKNOWN = 'scan unknown'
  VIRUS_SCAN_VIRUS = 'virus'

  # True when scan_result records a detected virus.
  def virus_scan_detected_virus?( scan_result: )
    VIRUS_SCAN_VIRUS == scan_result
  end

  def virus_scan_service_name
    Hydra::Works.default_system_virus_scanner.name
  end

  # True when the scan was skipped for any reason; all skip statuses share
  # the VIRUS_SCAN_SKIPPED prefix.
  def virus_scan_skipped?( scan_result: )
    return false if scan_result.blank?
    # FIX: use the constant instead of a duplicated 'scan skipped' literal
    scan_result.start_with? VIRUS_SCAN_SKIPPED
  end

  # Current time formatted for the database (ActiveSupport :db format).
  def virus_scan_timestamp_now
    Time.now.to_formatted_s( :db )
  end

end
-
-
end
-
# frozen_string_literal: true
-
-
1
module Deepblue
-
-
1
class YamlPopulateService
-
-
1
DEFAULT_CREATE_ZERO_LENGTH_FILES = true
-
1
DEFAULT_OVERWRITE_EXPORT_FILES = true
-
-
1
attr_accessor :mode, :source
-
-
# TODO: count these
-
1
attr_reader :total_collections_exported, :total_file_sets_exported, :total_works_exported, :total_users_exported
-
-
1
attr_accessor :overwrite_export_files, :create_zero_length_files
-
-
1
# Service that serializes collections, works, file sets, and users to
# YAML for export/build/migrate tasks.
#
# @param create_zero_length_files [Boolean] whether to export empty placeholder files
# @param mode [String] a MetadataHelper MODE_* value (e.g. build or migrate)
# @param overwrite_export_files [Boolean] whether to replace existing export files
# @param source [String] schema source identifier (e.g. DBDv2)
def initialize( create_zero_length_files: DEFAULT_CREATE_ZERO_LENGTH_FILES,
                mode: MetadataHelper::MODE_BUILD,
                overwrite_export_files: DEFAULT_OVERWRITE_EXPORT_FILES,
                source: MetadataHelper::DEFAULT_SOURCE )

  @create_zero_length_files = create_zero_length_files
  @mode = mode
  @overwrite_export_files = overwrite_export_files
  @source = source
  # running export counters, exposed via attr_readers
  @total_collections_exported = 0
  @total_file_sets_exported = 0
  @total_file_sets_size_exported = 0
  @total_works_exported = 0
  @total_users_exported = 0
end
-
-
1
# Write the YAML body for a collection: id, edit users, totals,
# visibility, and all remaining collection attributes (minus those
# handled specially via the skip list).
def yaml_body_collections( out, indent:, curation_concern: )
  @total_collections_exported += 1
  yaml_item( out, indent, ":id:", curation_concern.id )
  # collection_type only exists in the DBDv2 schema
  if source == MetadataHelper::SOURCE_DBDv2
    yaml_item( out, indent, ":collection_type:", curation_concern.collection_type.machine_id, escape: true )
  end
  yaml_item( out, indent, ":edit_users:", curation_concern.edit_users, escape: true )
  yaml_item_prior_identifier( out, indent, curation_concern: curation_concern )
  yaml_item_subject( out, indent, curation_concern: curation_concern )
  yaml_item( out, indent, ":total_work_count:", curation_concern.work_ids.count )
  yaml_item( out, indent, ":total_file_size:", curation_concern.total_file_size )
  yaml_item( out,
             indent,
             ":total_file_size_human_readable:",
             human_readable_size( curation_concern.total_file_size ),
             escape: true )
  yaml_item( out, indent, ":visibility:", curation_concern.visibility )
  # these attributes were already written above in special forms
  skip = %w[ prior_identifier rights rights_license subject subject_discipline total_file_size ]
  attribute_names_collection.each do |name|
    next if skip.include? name
    yaml_item_collection( out, indent, curation_concern, name: name )
  end
end
-
-
1
# Size in bytes for a file set: the recorded file_size when present,
# otherwise the original file's size (0 when there is no original file).
def yaml_file_size( file_set )
  if file_set.file_size.blank?
    file_set.original_file.nil? ? 0 : file_set.original_file.size
  else
    # file_size is a single-valued list; take the first element
    file_set.file_size[0]
  end
end
-
-
1
# Write the ':file_set_ids:' list followed by one keyed section per file
# set (metadata, checksums, export path, sizes) for the curation concern.
def yaml_body_files( out, indent_base:, indent:, curation_concern:, target_dirname: )
  indent_first_line = indent
  yaml_line( out, indent_first_line, ':file_set_ids:' )
  return unless curation_concern.file_sets.count.positive?
  indent = indent_base + indent_first_line + "-"
  # first pass: the id list
  curation_concern.file_sets.each do |file_set|
    yaml_item( out, indent, '', file_set.id, escape: true )
  end
  # second pass: one full section per file set
  curation_concern.file_sets.each do |file_set|
    @total_file_sets_exported += 1
    # migrations record provenance for each migrated file set
    log_provenance_migrate( curation_concern: file_set, parent: curation_concern ) if MetadataHelper::MODE_MIGRATE == mode
    file_id = ":#{yaml_file_set_id( file_set )}:"
    yaml_line( out, indent_first_line, file_id )
    indent = indent_base + indent_first_line
    yaml_item( out, indent, ':id:', file_set.id, escape: true )
    single_value = 1 == file_set.title.size
    yaml_item( out, indent, ':title:', file_set.title, escape: true, single_value: single_value )
    yaml_item_prior_identifier( out, indent, curation_concern: file_set )
    file_path = yaml_export_file_path( target_dirname: target_dirname, file_set: file_set )
    yaml_item( out, indent, ':file_path:', file_path.to_s, escape: true )
    checksum = yaml_file_set_checksum( file_set: file_set )
    yaml_item( out, indent, ":checksum_algorithm:", checksum.present? ? checksum.algorithm : '', escape: true )
    yaml_item( out, indent, ":checksum_value:", checksum.present? ? checksum.value : '', escape: true )
    yaml_item( out, indent, ":edit_users:", file_set.edit_users, escape: true )
    file_size = yaml_file_size(file_set)
    # puts "\nfile_size=#{file_size} file_size.class=#{file_size.class.name}\n" unless file_size.is_a? Integer
    @total_file_sets_size_exported += file_size.to_i
    yaml_item( out, indent, ":file_size:", file_size )
    yaml_item( out, indent, ":file_size_human_readable:", human_readable_size( file_size ), escape: true )
    yaml_item( out, indent, ":mime_type:", file_set.mime_type, escape: true )
    value = file_set.original_checksum.blank? ? '' : file_set.original_checksum[0]
    yaml_item( out, indent, ":original_checksum:", value )
    value = file_set.original_file.nil? ? nil : file_set.original_file.original_name
    yaml_item( out, indent, ":original_name:", value, escape: true )
    yaml_item( out, indent, ":visibility:", file_set.visibility )
    # title and file_size were already written above in special forms
    skip = %w[ title file_size ]
    attribute_names_file_set.each do |name|
      next if skip.include? name
      yaml_item_file_set( out, indent, file_set, name: name )
    end
  end
end
-
-
1
# Write one keyed section for a single user: email plus all remaining
# user attributes (email itself is skipped in the generic loop).
def yaml_body_user_body( out, indent_base:, indent:, user: )
  @total_users_exported += 1
  indent_first_line = indent
  user_email = ":#{yaml_user_email( user )}:"
  yaml_line( out, indent_first_line, user_email )
  indent = indent_base + indent_first_line
  yaml_item(out, indent, ':email:', user.email, escape: true )
  skip = %w[ email ]
  attribute_names_user.each do |name|
    next if skip.include? name
    yaml_item_user(out, indent, user, name: name )
  end
end
-
-
1
# Write the user count and the ':user_emails:' list for all users.
def yaml_body_users( out, indent_base:, indent:, users: )
  yaml_item( out, indent, ":total_user_count:", users.count )
  indent_first_line = indent
  yaml_line( out, indent_first_line, ':user_emails:' )
  return unless users.count.positive?
  indent = indent_base + indent_first_line + "-"
  users.each do |user|
    yaml_item( out, indent, '', user.email, escape: true )
  end
end
-
-
1
# Write the YAML body for a work: id, admin set, edit users, rights,
# subject, totals, visibility, and all remaining work attributes
# (minus those handled specially via the skip list).
def yaml_body_works( out, indent:, curation_concern: )
  @total_works_exported += 1
  yaml_item( out, indent, ":id:", curation_concern.id )
  yaml_item( out, indent, ":admin_set_id:", curation_concern.admin_set_id, escape: true )
  yaml_item( out, indent, ":edit_users:", curation_concern.edit_users, escape: true )
  yaml_item_prior_identifier( out, indent, curation_concern: curation_concern )
  yaml_item_rights( out, indent, curation_concern: curation_concern )
  yaml_item_subject( out, indent, curation_concern: curation_concern )
  yaml_item( out, indent, ":total_file_count:", curation_concern.file_set_ids.count )
  yaml_item( out, indent, ":total_file_size:", curation_concern.total_file_size )
  yaml_item( out,
             indent,
             ":total_file_size_human_readable:",
             human_readable_size( curation_concern.total_file_size ),
             escape: true )
  yaml_item( out, indent, ":visibility:", curation_concern.visibility )
  # these attributes were already written above in special forms
  skip = %w[ prior_identifier rights rights_license subject subject_discipline total_file_size ]
  attribute_names_work.each do |name|
    next if skip.include? name
    yaml_item_work( out, indent, curation_concern, name: name )
  end
end
-
-
1
# Render a value for YAML output. nil becomes ""; when escape is
# requested (and the value is not a comment) the value is JSON-encoded,
# with an empty string encoding collapsed back to "".
def yaml_escape_value( value, comment: false, escape: false )
  return "" if value.nil?
  return value if !escape || comment
  encoded = value.to_json
  "\"\"" == encoded ? "" : encoded
end
-
-
1
# Full export path for a file set's binary: "<dir>/<file_set_id>_<name>".
def yaml_export_file_path( target_dirname:, file_set: )
  export_file_name = yaml_export_file_name( file_set: file_set )
  target_dirname.join "#{file_set.id}_#{export_file_name}"
end
-
-
1
# Derive a filesystem-safe export name for the file set, replacing
# hostile characters with underscores.
def yaml_export_file_name( file_set: )
  title = file_set.title[0]
  file = MetadataHelper.file_from_file_set( file_set )
  if file.nil?
    rv = "nil_file"
  else
    rv = file&.original_name
    rv = "nil_original_file" if rv.nil?
  end
  # NOTE(review): this replaces rv with title whenever they differ, so the
  # original-name branches above only win when title equals the name —
  # confirm this precedence is intended.
  rv = title unless title == rv
  rv = rv.gsub( /[\/\?\<\>\\\:\*\|\'\"\^\;]/, '_' )
  return rv
end
-
-
1
# The checksum object of the file set's file, or nil when no file exists.
def yaml_file_set_checksum( file_set: )
  file = MetadataHelper.file_from_file_set( file_set )
  return file.checksum if file.present?
  return nil
end
-
-
1
# Key used for a file set's section in the YAML output: "f_<id>".
def yaml_file_set_id( file_set )
  format( "f_%s", file_set.id )
end
-
-
1
# Build the export YAML filename "<prefix><id>_<task>.yml" under
# pathname_dir (String or Pathname); returns a Pathname.
def yaml_filename( pathname_dir:, id:, prefix:, task: )
  dir = pathname_dir.is_a?( Pathname ) ? pathname_dir : Pathname.new( pathname_dir )
  dir.join "#{prefix}#{id}_#{task}.yml"
end
-
-
1
# Export file path for a collection, using the collection prefix.
# @return [Pathname]
def yaml_filename_collection( pathname_dir:, collection:, task: MetadataHelper::DEFAULT_TASK )
  yaml_filename( pathname_dir: pathname_dir,
                 id: collection.id,
                 prefix: MetadataHelper::PREFIX_COLLECTION,
                 task: task )
end
-
-
1
# Export file path for the users dump; the id portion is empty.
# @return [Pathname]
def yaml_filename_users( pathname_dir:, task: MetadataHelper::DEFAULT_TASK )
  yaml_filename( pathname_dir: pathname_dir,
                 id: '',
                 prefix: MetadataHelper::PREFIX_USERS,
                 task: task )
end
-
-
1
# Export file path for a work, using the work prefix.
# @return [Pathname]
def yaml_filename_work( pathname_dir:, work:, task: MetadataHelper::DEFAULT_TASK )
  yaml_filename( pathname_dir: pathname_dir,
                 id: work.id,
                 prefix: MetadataHelper::PREFIX_WORK,
                 task: task )
end
-
-
1
# Write the per-record YAML header block for a work/collection export.
# Emits depositor, visibility, ingester, source, timestamp, mode, id,
# then the section header line (e.g. the works or collections marker).
# @param out [IO] stream to write to
# @param indent [String] leading indentation for each line
# @param curation_concern [Object] work or collection being exported
# @param header_type [String] section label emitted last
def yaml_header( out, indent:, curation_concern:, header_type: )
  yaml_line( out, indent, ':email:', curation_concern.depositor )
  yaml_line( out, indent, ':visibility:', curation_concern.visibility )
  yaml_line( out, indent, ':ingester:', '' )
  yaml_line( out, indent, ':source:', source )
  yaml_line( out, indent, ':export_timestamp:', DateTime.now.to_s )
  yaml_line( out, indent, ':mode:', mode )
  yaml_line( out, indent, ':id:', curation_concern.id )
  yaml_line( out, indent, header_type )
end
-
-
1
# Write the file-level preamble: two comment lines (target path and the
# rake invocation that re-ingests it), the YAML document marker, and the
# top-level :user: key.
# @param rake_task [String] rake task named in the comment line
def yaml_header_populate( out, indent:, rake_task: 'umrdr:populate', target_filename: )
  [ [ target_filename.to_s, true ],
    [ "bundle exec rake #{rake_task}[#{target_filename}]", true ],
    [ "---", false ],
    [ ':user:', false ] ].each do |text, commented|
    yaml_line( out, indent, text, comment: commented )
  end
end
-
-
1
# Write the header block for a users export: ingester, source, timestamp,
# mode, then the users section marker.
def yaml_header_users( out, indent:, header_type: MetadataHelper::HEADER_TYPE_USERS )
  [ [ ':ingester:', '' ],
    [ ':source:', source ],
    [ ':export_timestamp:', DateTime.now.to_s ],
    [ ':mode:', mode ] ].each { |label, value| yaml_line( out, indent, label, value ) }
  yaml_line( out, indent, header_type )
end
-
-
1
# Is the given object a "work" for the configured source?
# DBDv2 works are DataSet; otherwise GenericWork.
# @return [Boolean]
def yaml_is_a_work?( curation_concern: )
  work_class = source == MetadataHelper::SOURCE_DBDv2 ? DataSet : GenericWork
  curation_concern.is_a? work_class
end
-
-
1
# Emit a single YAML key (and value) to +out+.
# Scalars print on the label line; enumerables print as a YAML list with
# one "- item" per element, indented one extra level. With single_value,
# only the first element of an enumerable is printed, as a scalar.
# @param out [IO] output stream
# @param indent [String] leading indentation
# @param label [String] YAML key text (e.g. ":title:")
# @param value [Object] scalar or enumerable value
# @param single_value [Boolean] collapse an enumerable to its first element
# @param comment [Boolean] prefix the whole line with "# "
# @param indent_base [String] extra indent for list items
#   (NOTE(review): default rendered as a single space in this view; other
#   call sites use two spaces — confirm against the real source)
# @param escape [Boolean] JSON-escape values via yaml_escape_value
def yaml_item( out,
               indent,
               label,
               value = '',
               single_value: false,
               comment: false,
               indent_base: " ",
               label_postfix: ' ',
               escape: false )

  indent = "# #{indent}" if comment
  if single_value && value.present? && value.respond_to?( :each )
    # Enumerable but only the first element is wanted: print as a scalar.
    value = value[0]
    out.puts "#{indent}#{label}#{label_postfix}#{yaml_escape_value( value, comment: comment, escape: escape )}"
  elsif value.respond_to?(:each)
    # Full enumerable (including an empty one): label line, then list items.
    out.puts "#{indent}#{label}#{label_postfix}"
    indent += indent_base
    value.each { |item| out.puts "#{indent}- #{yaml_escape_value( item, comment: comment, escape: escape )}" }
  else
    # Plain scalar.
    out.puts "#{indent}#{label}#{label_postfix}#{yaml_escape_value( value, comment: comment, escape: escape )}"
  end
end
-
-
1
# Emit one named attribute of a collection, skipping ignored names and
# blank values (unless the name is in the always-include list).
def yaml_item_collection( out, indent, curation_concern, name: )
  return if MetadataHelper::ATTRIBUTE_NAMES_IGNORE.include?( name )
  value = curation_concern[name]
  return if value.blank? && !MetadataHelper::ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC.include?( name )
  yaml_item( out, indent, ":#{name}:", value, escape: true )
end
-
-
1
# Emit one named attribute of a file set, skipping ignored names and
# blank values (unless the name is in the file-set always-include list).
def yaml_item_file_set( out, indent, file_set, name: )
  return if MetadataHelper::ATTRIBUTE_NAMES_IGNORE.include?( name )
  value = file_set[name]
  return if value.blank? && !MetadataHelper::ATTRIBUTE_NAMES_ALWAYS_INCLUDE_FILE_SET.include?( name )
  yaml_item( out, indent, ":#{name}:", value, escape: true )
end
-
-
1
# Emit the :prior_identifier: item. DBDv1 models have no prior_identifier
# attribute, so an empty value is written for that source.
def yaml_item_prior_identifier( out, indent, curation_concern: )
  value = source == MetadataHelper::SOURCE_DBDv1 ? '' : curation_concern.prior_identifier
  yaml_item( out, indent, ":prior_identifier:", value )
end
-
-
1
# Emit the referenced-by item under the label appropriate to the source
# schema (:isReferencedBy: for DBDv1, :referenced_by: otherwise).
def yaml_item_referenced_by( out, indent, curation_concern: )
  if source == MetadataHelper::SOURCE_DBDv1
    label = ":isReferencedBy:"
    value = curation_concern.isReferencedBy
  else
    label = ":referenced_by:"
    value = curation_concern.referenced_by
  end
  yaml_item( out, indent, label, value, escape: true )
end
-
-
1
# Emit the rights item under the label appropriate to the source schema
# (:rights: for DBDv1, :rights_license: otherwise).
def yaml_item_rights( out, indent, curation_concern: )
  if source == MetadataHelper::SOURCE_DBDv1
    label = ":rights:"
    value = curation_concern.rights
  else
    label = ":rights_license:"
    value = curation_concern.rights_license
  end
  yaml_item( out, indent, label, value, escape: true )
end
-
-
1
# Emit the subject item under the label appropriate to the source schema
# (:subject: for DBDv1, :subject_discipline: otherwise).
def yaml_item_subject( out, indent, curation_concern: )
  if source == MetadataHelper::SOURCE_DBDv1
    label = ":subject:"
    value = curation_concern.subject
  else
    label = ":subject_discipline:"
    value = curation_concern.subject_discipline
  end
  yaml_item( out, indent, label, value, escape: true )
end
-
-
1
# Emit one named attribute of a user, skipping user-ignored names and
# blank values (unless the name is in the user always-include list).
def yaml_item_user( out, indent, user, name: )
  return if MetadataHelper::ATTRIBUTE_NAMES_USER_IGNORE.include?( name )
  value = user[name]
  return if value.blank? && !MetadataHelper::ATTRIBUTE_NAMES_ALWAYS_INCLUDE_USER.include?( name )
  yaml_item( out, indent, ":#{name}:", value, escape: true )
end
-
-
1
# Emit one named attribute of a work, skipping ignored names and blank
# values (unless the name is in the always-include list).
def yaml_item_work( out, indent, curation_concern, name: )
  return if MetadataHelper::ATTRIBUTE_NAMES_IGNORE.include?( name )
  value = curation_concern[name]
  return if value.blank? && !MetadataHelper::ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC.include?( name )
  yaml_item( out, indent, ":#{name}:", value, escape: true )
end
-
-
1
# Write one "label value" line to +out+; with comment: true the entire
# line (including its indentation) is prefixed with "# ".
def yaml_line( out, indent, label, value = '', comment: false, label_postfix: ' ', escape: false )
  prefix = comment ? "# #{indent}" : indent
  rendered = yaml_escape_value( value, comment: comment, escape: escape )
  out.puts "#{prefix}#{label}#{label_postfix}#{rendered}"
end
-
-
1
# Export a collection (and optionally its member works) to a YAML file.
# Operates in two phases:
#   * out.nil?  — resolves the collection, computes target file/dir, opens
#     the file and recurses with the open stream (export_files: false so
#     payload files are exported once, below, after the YAML is written);
#   * out given — streams preamble, header, collection body, then each
#     member work's body and file entries.
# @param collection [Collection, String] collection or its id
# @param dir [Pathname, String] base export directory
# @param out [IO, nil] open stream for the streaming phase
# @param populate_works [Boolean] include member works in the YAML
# @param export_files [Boolean] copy member works' payload files to disk
# @param target_filename [Pathname, nil] echoed into the file preamble
# @param target_dirname [Pathname, nil] directory recorded for file entries
# @return [Pathname, nil] the file written, or nil in the streaming phase
def yaml_populate_collection( collection:,
                              dir: MetadataHelper::DEFAULT_BASE_DIR,
                              out: nil,
                              populate_works: true,
                              export_files: true,
                              target_filename: nil,
                              target_dirname: nil )

  target_file = nil
  dir = Pathname.new dir unless dir.is_a? Pathname
  if out.nil?
    collection = Collection.find collection if collection.is_a? String
    target_file = yaml_filename_collection( pathname_dir: dir, collection: collection )
    target_dir = yaml_targetdir_collection( pathname_dir: dir, collection: collection )
    Dir.mkdir( target_dir ) if export_files && !Dir.exist?( target_dir )
    open( target_file, 'w' ) do |out2|
      yaml_populate_collection( collection: collection,
                                out: out2,
                                populate_works: populate_works,
                                export_files: false,
                                target_filename: target_file,
                                target_dirname: target_dir )
    end
    # Payload export happens after the YAML file is closed.
    if export_files
      collection.member_objects.each do |work|
        next unless yaml_is_a_work?( curation_concern: work )
        yaml_work_export_files( work: work, target_dirname: target_dir )
      end
    end
  else
    log_provenance_migrate( curation_concern: collection ) if MetadataHelper::MODE_MIGRATE == mode
    indent_base = " " * 2
    indent = indent_base * 0
    yaml_header_populate( out, indent: indent, target_filename: target_filename )
    indent = indent_base * 1
    yaml_header( out,
                 indent: indent,
                 curation_concern: collection,
                 header_type: MetadataHelper::HEADER_TYPE_COLLECTIONS )
    indent = indent_base * 2
    yaml_body_collections( out, indent: indent, curation_concern: collection )
    return unless populate_works
    return unless collection.member_objects.size.positive?
    # First pass: list member work ids.
    indent = indent_base * 2
    yaml_line( out, indent, MetadataHelper::HEADER_TYPE_WORKS )
    indent = indent_base + indent + "-"
    collection.member_objects.each do |work|
      next unless yaml_is_a_work?( curation_concern: work )
      yaml_item( out, indent, '', work.id, escape: true )
    end
    # Second pass: one ":works_<id>:" section per member work.
    indent = indent_base * 2
    collection.member_objects.each do |work|
      next unless yaml_is_a_work?( curation_concern: work )
      indent = indent_base * 2
      yaml_line( out, indent, ":works_#{work.id}:" )
      indent = indent_base * 3
      log_provenance_migrate( curation_concern: work, parent: collection ) if MetadataHelper::MODE_MIGRATE == mode
      yaml_body_works( out, indent: indent, curation_concern: work )
      yaml_body_files( out,
                       indent_base: indent_base,
                       indent: indent,
                       curation_concern: work,
                       target_dirname: target_dirname )
    end
  end
end
-
-
1
# Snapshot of the running export counters.
# @return [Hash] totals for collections, works, file sets (count, size,
#   human-readable size) and users exported so far
def yaml_populate_stats
  {
    total_collections_exported: @total_collections_exported,
    total_works_exported: @total_works_exported,
    total_file_sets_exported: @total_file_sets_exported,
    total_file_sets_size_exported: @total_file_sets_size_exported,
    total_file_sets_size_readable_exported: human_readable_size( @total_file_sets_size_exported ),
    total_users_exported: @total_users_exported
  }
end
-
-
1
# Export all users to a YAML file.
# Two phases: with out nil, computes the target path, opens it and
# recurses with the open stream; with out given, streams the preamble,
# users header, and one body section per user.
# @param dir [Pathname, String] base export directory (created if missing)
# @param out [IO, nil] open stream for the streaming phase
# @param target_filename [Pathname, nil] echoed into the file preamble
# @return [Pathname, nil] the file written, or nil in the streaming phase
def yaml_populate_users( dir: MetadataHelper::DEFAULT_BASE_DIR, out: nil, target_filename: nil )
  target_file = nil
  dir = Pathname.new dir unless dir.is_a? Pathname
  Dir.mkdir( dir ) unless Dir.exist? dir
  if out.nil?
    target_file = yaml_filename_users( pathname_dir: dir, task: mode )
    open( target_file, 'w' ) do |out2|
      yaml_populate_users( out: out2, target_filename: target_file )
    end
  else
    # log_provenance_migrate( curation_concern: curation_concern ) if MetadataHelper::MODE_MIGRATE == mode
    indent_base = " " * 2
    indent = indent_base * 0
    yaml_header_populate( out, indent: indent, rake_task: 'umrdr:populate_users', target_filename: target_filename )
    indent = indent_base * 1
    yaml_header_users( out, indent: indent )
    indent = indent_base * 2
    users = User.all
    yaml_body_users( out, indent_base: indent_base, indent: indent, users: users )
    users.each do |user|
      yaml_body_user_body( out, indent_base: indent_base, indent: indent, user: user )
    end
  end
  return target_file
end
-
-
1
# Export a single work to a YAML file, optionally copying its payload
# files to a sibling directory.
# Two phases: with out nil, resolves the work, computes target file/dir,
# opens the file and recurses with the open stream, then exports payload
# files; with out given, streams preamble, header, work body and file
# entries (export_files is not consulted in this phase).
# @param curation_concern [Object, String] work or its id
# @param dir [Pathname, String] base export directory
# @param out [IO, nil] open stream for the streaming phase
# @param export_files [Boolean] copy payload files after writing the YAML
# @param target_filename [Pathname, nil] echoed into the file preamble
# @param target_dirname [Pathname, nil] directory recorded for file entries
# @return [Pathname, nil] the file written, or nil in the streaming phase
def yaml_populate_work( curation_concern:,
                        dir: MetadataHelper::DEFAULT_BASE_DIR,
                        out: nil,
                        export_files: true,
                        target_filename: nil,
                        target_dirname: nil )

  target_file = nil
  dir = Pathname.new dir unless dir.is_a? Pathname
  if out.nil?
    curation_concern = yaml_work_find( curation_concern: curation_concern ) if curation_concern.is_a? String
    target_file = yaml_filename_work( pathname_dir: dir, work: curation_concern )
    target_dir = yaml_targetdir_work( pathname_dir: dir, work: curation_concern )
    Dir.mkdir( target_dir ) if export_files && !Dir.exist?( target_dir )
    open( target_file, 'w' ) do |out2|
      yaml_populate_work( curation_concern: curation_concern,
                          out: out2,
                          export_files: export_files,
                          target_filename: target_file,
                          target_dirname: target_dir )
    end
    if export_files
      yaml_work_export_files( work: curation_concern, target_dirname: target_dir )
    end
  else
    log_provenance_migrate( curation_concern: curation_concern ) if MetadataHelper::MODE_MIGRATE == mode
    indent_base = " " * 2
    indent = indent_base * 0
    yaml_header_populate( out, indent: indent, target_filename: target_filename )
    indent = indent_base * 1
    yaml_header( out,
                 indent: indent,
                 curation_concern: curation_concern,
                 header_type: MetadataHelper::HEADER_TYPE_WORKS )
    indent = indent_base * 2
    yaml_body_works( out, indent: indent, curation_concern: curation_concern )
    yaml_body_files( out,
                     indent_base: indent_base,
                     indent: indent,
                     curation_concern: curation_concern,
                     target_dirname: target_dirname )
  end
  return target_file
end
-
-
1
# Build the export payload directory path "<dir>/<prefix><id>_<task>"
# (same as yaml_filename but without the .yml suffix).
# @return [Pathname]
def yaml_targetdir( pathname_dir:, id:, prefix:, task: )
  base = pathname_dir.is_a?( Pathname ) ? pathname_dir : Pathname.new( pathname_dir )
  base.join( "#{prefix}#{id}_#{task}" )
end
-
-
1
# Payload directory for a collection export.
# @return [Pathname]
def yaml_targetdir_collection( pathname_dir:, collection:, task: MetadataHelper::DEFAULT_TASK )
  yaml_targetdir( pathname_dir: pathname_dir,
                  id: collection.id,
                  prefix: MetadataHelper::PREFIX_COLLECTION,
                  task: task )
end
-
-
1
# Payload directory for a users export; the id portion is empty.
# @return [Pathname]
def yaml_targetdir_users( pathname_dir:, task: MetadataHelper::DEFAULT_TASK )
  yaml_targetdir( pathname_dir: pathname_dir,
                  id: '',
                  prefix: MetadataHelper::PREFIX_USERS,
                  task: task )
end
-
-
1
# Payload directory for a work export.
# @return [Pathname]
def yaml_targetdir_work( pathname_dir:, work:, task: MetadataHelper::DEFAULT_TASK )
  yaml_targetdir( pathname_dir: pathname_dir,
                  id: work.id,
                  prefix: MetadataHelper::PREFIX_WORK,
                  task: task )
end
-
-
1
# YAML key for a user record: "user_<email>".
# @param user [User]
# @return [String]
def yaml_user_email( user )
  format( 'user_%s', user.email )
end
-
-
1
# Copy the binary payload of every file set attached to +work+ into
# +target_dirname+, writing a progress log alongside.
# Per file set: skips existing exports unless overwrite_export_files is
# set; when the repository file is missing, either creates a zero-length
# placeholder (create_zero_length_files) or logs a warning.
# Any exception is caught, printed, and swallowed so a single bad file
# does not abort a long-running export (rescue Exception is deliberate).
# @param work [Object] the work whose file sets are exported
# @param target_dirname [Pathname] destination directory
# @param log_filename [String, Pathname, nil] explicit log path; defaults
#   to "<target_dirname>/.export.log"
def yaml_work_export_files( work:, target_dirname: nil, log_filename: nil )
  # BUG FIX: log_file was previously assigned only when log_filename was
  # nil, so passing an explicit log_filename left log_file nil and the
  # open() below raised. Now an explicit log_filename is honored.
  log_file = log_filename.nil? ? target_dirname.join( ".export.log" ) : log_filename
  # File.open instead of Kernel#open: avoids command-execution on "|" paths.
  File.open( log_file, 'w' ) { |f| f.write( '' ) } # erase log file
  start_time = Time.now
  log_lines( log_file,
             "Starting yaml work export of files at #{start_time} ...",
             "Generic work id: #{work.id}",
             "Total file count: #{work.file_sets.count}")
  total_byte_count = 0
  if work.file_sets.count.positive?
    work.file_sets.each do |file_set|
      export_file_name = yaml_export_file_path( target_dirname: target_dirname, file_set: file_set )
      # Export unless the target already exists (or overwriting is enabled).
      write_file = overwrite_export_files || !File.exist?( export_file_name )
      file = MetadataHelper.file_from_file_set( file_set )
      file_size = if file_set.file_size.blank?
                    file_set.original_file.nil? ? 0 : file_set.original_file.size
                  else
                    file_set.file_size[0]
                  end
      export_what = "#{export_file_name} (#{human_readable_size(file_size)} / #{file_size} bytes)"
      if write_file && file.present?
        source_uri = file.uri.value
        log_lines( log_file, "Starting file export of #{export_what} at #{Time.now}." )
        bytes_copied = ExportFilesHelper.export_file_uri( source_uri: source_uri, target_file: export_file_name )
        total_byte_count += bytes_copied
        log_lines( log_file, "Finished file export of #{export_what} at #{Time.now}." )
      elsif write_file && file.nil? && export_file_name.present?
        if create_zero_length_files
          log_lines( log_file, "File export of file_set #{file_set.id} -- #{export_what} at #{Time.now} creating zero length file because file is nil." )
          File.open( export_file_name, 'w' ) { |out| out.write( '' ) }
        else
          log_lines( log_file, "WARNING: Skipping file export of file_set #{file_set.id} -- #{export_what} at #{Time.now} because file is nil." )
        end
      elsif write_file && file.nil?
        log_lines( log_file, "WARNING: Skipping file export of file_set #{file_set.id} -- #{export_what} at #{Time.now} because file is nil and export_file_name is empty." )
      else
        log_lines( log_file, "Skipping file export of #{export_what} at #{Time.now}." )
      end
    end
  end
  end_time = Time.now
  log_lines( log_file,
             "Total bytes exported: #{total_byte_count} (#{human_readable_size(total_byte_count)})",
             "... finished yaml generic work export of files at #{end_time}.")
rescue Exception => e # rubocop:disable Lint/RescueException
  # rubocop:disable Rails/Output
  puts "#{e.class}: #{e.message} at #{e.backtrace.join("\n")}"
  # rubocop:enable Rails/Output
end
-
-
1
# Look up a work by id in the model class appropriate to the source
# (DataSet for DBDv2, GenericWork otherwise).
# @param curation_concern [String] work id
# @return [Object] the found work
def yaml_work_find( curation_concern: )
  model = source == MetadataHelper::SOURCE_DBDv2 ? DataSet : GenericWork
  model.find curation_concern
end
-
-
1
# Build a lookup hash (name => true) from the always-include attribute
# name list, for O(1) membership tests.
# @return [Hash{String=>true}]
def self.init_attribute_names_always_include_cc
  MetadataHelper::ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC.each_with_object( {} ) do |name, map|
    map[name] = true
  end
end
-
-
1
protected
-
-
1
# Memoized always-include lookup hash.
# NOTE(review): cached in a @@class variable (shared across the whole
# inheritance tree) whose name differs from the method name — kept as-is
# because other code outside this view may rely on the shared cache.
def attribute_names_always_include_cc
  @@attribute_names_always_include ||= init_attribute_names_always_include_cc
end
-
-
1
# Memoized, sorted list of Collection attribute names.
# NOTE(review): @@class-variable cache is shared across the inheritance
# tree; kept as-is to preserve that sharing.
def attribute_names_collection
  @@attribute_names_collection ||= Collection.attribute_names.sort
end
-
-
1
# Memoized, sorted list of FileSet attribute names.
# NOTE(review): @@class-variable cache is shared across the inheritance
# tree; kept as-is to preserve that sharing.
def attribute_names_file_set
  @@attribute_names_file_set ||= FileSet.attribute_names.sort
end
-
-
1
# Memoized, sorted list of User attribute names.
# NOTE(review): @@class-variable cache is shared across the inheritance
# tree; kept as-is to preserve that sharing.
def attribute_names_user
  @@attribute_names_user ||= User.attribute_names.sort
end
-
-
1
# Sorted attribute names for the work model of the configured source.
# Intentionally not memoized: the result depends on +source+.
# @return [Array<String>]
def attribute_names_work
  model = source == MetadataHelper::SOURCE_DBDv2 ? DataSet : GenericWork
  model.attribute_names.sort
end
-
-
1
# Pick the representative file from a file set's files: the last file with
# a non-empty original_name, falling back to the first file; nil when the
# file set has no files.
# @param file_set [FileSet]
# @return [Object, nil]
def file_from_file_set( file_set )
  files = file_set.files
  return nil if files.nil? || files.size.zero?
  named = nil
  files.each { |candidate| named = candidate unless candidate.original_name.empty? }
  named || files[0]
end
-
-
1
# Format a byte count as a human-readable size ("1.21 MB"), 3 significant
# digits, via ActiveSupport's converter.
# @param value [#to_i] byte count
# @return [String]
def human_readable_size( value )
  ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert( value.to_i, precision: 3 )
end
-
-
1
# Append each of +lines+ to +filename+, one line per entry.
# @param filename [String, Pathname] log file path
# @param lines [Array<String>] lines to append
def log_lines( filename, *lines )
  File.open( filename, "a" ) do |log|
    lines.each { |line| log.puts line }
  end
end
-
-
1
# Record a provenance entry for migrating +curation_concern+ in or out.
# DBDv1 models carry no provenance behavior, so the event is written
# directly to PROV_LOGGER; otherwise it is delegated to the model's
# provenance_migrate (skipped silently when the model lacks it).
# @param curation_concern [Object] work or collection being migrated
# @param parent [Object, nil] enclosing collection, recorded when present
# @param migrate_direction [String] 'export' or 'import'
def log_provenance_migrate( curation_concern:, parent: nil, migrate_direction: 'export' )
  if source == MetadataHelper::SOURCE_DBDv1
    msg = "Migrate #{migrate_direction} #{curation_concern.class.name} #{curation_concern.id}"
    msg += " parent_id: #{parent.id}" if parent.present?
    PROV_LOGGER.info( msg )
  else
    return unless curation_concern.respond_to? :provenance_migrate
    parent_id = nil
    parent_id = parent.id if parent.present?
    curation_concern.provenance_migrate( current_user: nil,
                                         parent_id: parent_id,
                                         migrate_direction: migrate_direction )
  end
end
-
-
1
# Path of the metadata report for a collection.
# NOTE(review): prefix is "w_" even though this names a collection report
# (the collection+work variant uses "c_") — confirm whether "c_" was
# intended; preserved as-is.
# @return [Pathname]
def metadata_filename_collection( pathname_dir, collection )
  pathname_dir.join( "w_#{collection.id}_metadata_report.txt" )
end
-
-
1
# Path of the metadata report for a work within a collection.
# @return [Pathname]
def metadata_filename_collection_work( pathname_dir, collection, work )
  pathname_dir.join( "c_#{collection.id}_w_#{work.id}_metadata_report.txt" )
end
-
-
1
# Path of the metadata report for a standalone work.
# @return [Pathname]
def metadata_filename_work( pathname_dir, work )
  pathname_dir.join( "w_#{work.id}_metadata_report.txt" )
end
-
-
1
# Does the attribute value hold more than one element?
# Blank values and scalars are single-valued by definition.
# @return [Boolean]
def metadata_multi_valued?( attribute_value )
  return false if attribute_value.blank?
  attribute_value.respond_to?( :each ) && attribute_value.size > 1
end
-
-
end
-
-
end
-
1
module Hyrax
  module Collections
    # Responsible for retrieving collection members (member works and
    # member subcollections) from Solr via CollectionMemberSearchBuilder.
    class CollectionMemberService
      attr_reader :scope, :params, :collection
      delegate :repository, to: :scope

      # @param scope [#repository] Typically a controller object which responds to :repository
      # @param collection [Collection] the collection whose members are retrieved
      # @param params [ActionController::Parameters] query params
      def initialize(scope:, collection:, params:)
        @scope = scope
        @collection = collection
        @params = params
      end

      # @api public
      #
      # Collections which are members of the given collection
      # @return [Blacklight::Solr::Response] {up to 50 solr documents}
      def available_member_subcollections
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "subcollections_search_builder=#{subcollections_search_builder}",
                                               "params_for_subcollections=#{params_for_subcollections}",
                                               "" ]
        query_solr(query_builder: subcollections_search_builder, query_params: params_for_subcollections)
      end

      # @api public
      #
      # Works which are members of the given collection
      # @return [Blacklight::Solr::Response]
      def available_member_works
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "works_search_builder=#{works_search_builder}",
                                               "params=#{params}",
                                               "" ]
        query_solr(query_builder: works_search_builder, query_params: params)
      end

      # @api public
      #
      # Work ids of the works which are members of the given collection
      # @return [Blacklight::Solr::Response]
      def available_member_work_ids
        query_solr_with_field_selection(query_builder: work_ids_search_builder, fl: 'id')
      end

      private

      # @api private
      #
      # set up a member search builder for works only
      # @return [CollectionMemberSearchBuilder] new or existing
      def works_search_builder
        @works_search_builder ||= Hyrax::CollectionMemberSearchBuilder.new(scope: scope, collection: collection, search_includes_models: :works)
      end

      # @api private
      #
      # set up a member search builder for collections only
      # @return [CollectionMemberSearchBuilder] new or existing
      def subcollections_search_builder
        @subcollections_search_builder ||= Hyrax::CollectionMemberSearchBuilder.new(scope: scope, collection: collection, search_includes_models: :collections)
      end

      # @api private
      #
      # set up a member search builder for returning work ids only
      # @return [CollectionMemberSearchBuilder] new or existing
      def work_ids_search_builder
        @work_ids_search_builder ||= Hyrax::CollectionMemberSearchBuilder.new(scope: scope, collection: collection, search_includes_models: :works)
      end

      # @api private
      #
      def query_solr(query_builder:, query_params:)
        repository.search(query_builder.with(query_params).query)
      end

      # @api private
      #
      def query_solr_with_field_selection(query_builder:, fl:)
        repository.search(query_builder.merge(fl: fl).query)
      end

      # @api private
      #
      # Blacklight pagination still needs to be overridden and set up for the subcollections.
      # @return <Hash> the additional inputs required for the subcollection member search builder
      def params_for_subcollections
        # To differentiate current page for works vs subcollections, we have to use a sub_collection_page
        # param. Map this to the page param before querying for subcollections, if it's present
        # NOTE(review): when :sub_collection_page is absent this assigns
        # params[:page] = nil, clobbering any existing :page value despite
        # the "if it's present" comment above — confirm whether a key?
        # guard is intended.
        params[:page] = params.delete(:sub_collection_page)
        params
      end
    end
  end
end
-
# frozen_string_literal: true
-
-
1
module Hyrax

  # Queries the repository for embargoed objects (expired, active, or
  # deactivated; repository-wide or per-user) and wraps results in
  # presenters. Presenter plumbing (presenters/presenter_class) comes from
  # RestrictionService.
  class EmbargoService < RestrictionService

    class << self
      #
      # Methods for Querying Repository to find Embargoed Objects
      #

      # Returns all assets with embargo release date set to a date in the past
      def assets_with_expired_embargoes
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "" ]
        builder = Hyrax::ExpiredEmbargoSearchBuilder.new(self)
        presenters(builder)
      end

      # Returns all assets with embargo release date set
      # (assumes that when lease visibility is applied to assets
      # whose leases have expired, the lease expiration date will be removed from its metadata)
      def assets_under_embargo
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "" ]
        builder = Hyrax::EmbargoSearchBuilder.new(self)
        presenters(builder)
      end

      # Returns all assets that have had embargoes deactivated in the past.
      def assets_with_deactivated_embargoes
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "" ]
        builder = Hyrax::DeactivatedEmbargoSearchBuilder.new(self)
        presenters(builder)
      end

      # Returns all assets with embargo release date set to a date in the past,
      # restricted to the given user via the My::* search builder.
      def my_assets_with_expired_embargoes( current_user_key )
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "" ]
        builder = Hyrax::My::ExpiredEmbargoSearchBuilder.new(self)
        builder.current_user_key = current_user_key
        presenters(builder)
      end

      # Returns all assets with embargo release date set, restricted to the
      # given user (assumes that when lease visibility is applied to assets
      # whose leases have expired, the lease expiration date will be removed from its metadata)
      def my_assets_under_embargo( current_user_key )
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "" ]
        builder = Hyrax::My::EmbargoSearchBuilder.new(self)
        builder.current_user_key = current_user_key
        presenters(builder)
      end

      # Returns all assets that have had embargoes deactivated in the past,
      # restricted to the given user.
      def my_assets_with_deactivated_embargoes( current_user_key )
        ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                               ::Deepblue::LoggingHelper.called_from,
                                               "" ]
        builder = Hyrax::My::DeactivatedEmbargoSearchBuilder.new(self)
        builder.current_user_key = current_user_key
        presenters(builder)
      end

      private

      # Presenter class used by RestrictionService#presenters.
      def presenter_class
        Hyrax::EmbargoPresenter
      end

    end

  end

end
-
# this monkey overrides Hydra::FutureDateValidator in the gem hydra-access-controls
-
-
1
module Hydra

  # Monkey patch of Hydra::FutureDateValidator (hydra-access-controls):
  # validates that a value parses as a date strictly in the future, with a
  # local addition — embargo release dates skip the future check entirely
  # when the application config disables enforcement.
  class FutureDateValidator < ActiveModel::EachValidator

    # @param record [ActiveModel::Model] object under validation
    # @param attribute [Symbol] attribute being validated
    # @param value [Object] candidate date value (blank values pass)
    def validate_each(record, attribute, value)
      if value.present?
        begin
          # NOTE: assignment (not ==) is intentional: the parsed date is
          # both tested for truthiness and captured for the comparison.
          if date = value.to_date
            if attribute.to_s == "embargo_release_date"
              # Local behavior: allow past release dates when enforcement
              # is turned off in the application configuration.
              return unless DeepBlueDocs::Application.config.embargo_enforce_future_release_date
            end
            if date <= Date.today
              # Deepblue::LoggingHelper.bold_debug [ Deepblue::LoggingHelper.here,
              #                                      Deepblue::LoggingHelper.called_from,
              #                                      "date=#{date}",
              #                                      "attribute=#{attribute}" ] # + caller_locations(1, 40)
              record.errors[attribute] << "FutureDateValidator says Must be a future date"
            end
          else
            record.errors[attribute] << "Invalid Date Format"
          end
        rescue ArgumentError, NoMethodError
          # to_date raising (or missing entirely) means the value is not a date.
          record.errors[attribute] << "Invalid Date Format"
        end
      end
    end

  end

end
-
# frozen_string_literal: true

require_relative 'boot'
require 'rails/all'
# require 'app/model/concerns/hydra/access_controls/access_right'
# Load AccessRight straight out of the installed hydra-access-controls gem so
# its visibility constants are available for the embargo settings below.
require File.join(Gem::Specification.find_by_name("hydra-access-controls").full_gem_path, "app/models/concerns/hydra/access_controls/access_right.rb")
# require_relative '../lib/rack_multipart_buf_size_setter.rb'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

# rubocop:disable Rails/Output
module DeepBlueDocs

  # Main Rails application class. All application-wide configuration values
  # (read elsewhere as DeepBlueDocs::Application.config.<name>) are set here.
  class Application < Rails::Application

    config.autoload_paths += [Rails.root.join('lib')]
    config.eager_load_paths += [Rails.root.join('lib')]

    # https://bibwild.wordpress.com/2016/12/27/a-class_eval-monkey-patching-pattern-with-prepend/
    config.to_prepare do
      # Load any monkey-patching extensions in to_prepare for Rails dev-mode class-reloading.
      Dir.glob(File.join(File.dirname(__FILE__), "../lib/extensions/extensions.rb")) do |c|
        Rails.configuration.cache_classes ? require(c) : load(c)
      end
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    #
    # reference config values like: DeepBlueDocs::Application.config.variable_name

    # Chimera configuration goes here
    # config.authentication_method = "generic"
    config.authentication_method = "iu"
    # config.authentication_method = "umich"

    config.generators do |g|
      g.test_framework :rspec, spec: true
    end

    # config.middleware.insert_before Rack::Runtime, RackMultipartBufSizeSetter

    # config.dbd_version = 'DBDv1'
    config.dbd_version = 'DBDv2'

    config.show_masthead_announcement = false

    # puts "config.time_zone=#{config.time_zone}"
    # NOTE(review): DateTime.now captures the server offset/zone once at boot.
    config.timezone_offset = DateTime.now.offset
    config.timezone_zone = DateTime.now.zone
    config.datetime_stamp_display_local_time_zone = true

    ## ensure tmp directories are defined
    # Redirect TMPDIR (and the JVM temp dir) away from /tmp so large scratch
    # files land under ./tmp/derivatives/ instead of the system temp partition.
    verbose_init = false
    puts "ENV['TMPDIR']=#{ENV['TMPDIR']}" if verbose_init
    puts "ENV['_JAVA_OPTIONS']=#{ENV['_JAVA_OPTIONS']}" if verbose_init
    puts "ENV['JAVA_OPTIONS']=#{ENV['JAVA_OPTIONS']}" if verbose_init
    tmpdir = ENV['TMPDIR']
    if tmpdir.blank? || tmpdir == '/tmp' || tmpdir.start_with?( '/tmp/' )
      tmpdir = File.absolute_path( './tmp/derivatives/' )
      ENV['TMPDIR'] = tmpdir
    end
    ENV['_JAVA_OPTIONS'] = "-Djava.io.tmpdir=#{tmpdir}" if ENV['_JAVA_OPTIONS'].blank?
    ENV['JAVA_OPTIONS'] = "-Djava.io.tmpdir=#{tmpdir}" if ENV['JAVA_OPTIONS'].blank?
    puts "ENV['TMPDIR']=#{ENV['TMPDIR']}"
    puts "ENV['_JAVA_OPTIONS']=#{ENV['_JAVA_OPTIONS']}" if verbose_init
    puts "ENV['JAVA_OPTIONS']=#{ENV['JAVA_OPTIONS']}" if verbose_init
    puts `echo $TMPDIR`.to_s if verbose_init
    puts `echo $_JAVA_OPTIONS`.to_s if verbose_init
    puts `echo $JAVA_OPTIONS`.to_s if verbose_init

    # For properly generating URLs and minting DOIs - the app may not by default
    # Outside of a request context the hostname needs to be provided.
    config.hostname = Settings.hostname
    # puts "config.hostname=#{config.hostname}"

    ## configure box
    config.box_enabled = false
    config.box_developer_token = nil # replace this with a developer token to override Single Auth
    # config.box_developer_token = 'IGmQMmqw8coKpuQDN3EG4gBrDzn78sGr'.freeze
    config.box_dlib_dbd_box_user_id = '3200925346'
    config.box_ulib_dbd_box_id = '45101723215'
    config.box_verbose = true
    config.box_always_report_not_logged_in_errors = true
    config.box_create_dirs_for_empty_works = true
    config.box_access_and_refresh_token_file = Rails.root.join( 'config', 'box_config.yml' ).freeze
    config.box_access_and_refresh_token_file_init = Rails.root.join( 'config', 'box_config_init.yml' ).freeze
    # Box integration needs box_enabled plus either a developer token or a
    # stored access/refresh token file.
    config.box_integration_enabled = config.box_enabled && ( !config.box_developer_token.nil? ||
        File.exist?( config.box_access_and_refresh_token_file ) )

    ## configure embargo
    config.embargo_enforce_future_release_date = true # now that we have automated embargo expiration
    config.embargo_visibility_after_default_status = ::Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
    config.embargo_visibility_during_default_status = ::Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
    config.embargo_manage_hide_files = true
    config.embargo_allow_children_unembargo_choice = false
    # Embargo notification emails to RDS go out only on these hosts.
    config.embargo_email_rds_hostnames = [ 'testing.deepblue.lib.umich.edu',
                                           'staging.deepblue.lib.umich.edu',
                                           'deepblue.lib.umich.edu' ].freeze
    config.embargo_about_to_expire_email_rds = config.embargo_email_rds_hostnames.include? config.hostname
    config.embargo_deactivate_email_rds = config.embargo_email_rds_hostnames.include? config.hostname

    ## configure for Globus
    # -- To enable Globus for development, create /deepbluedata-globus/download and /deepbluedata-globus/prep
    config.globus_era_timestamp = Time.now.freeze
    config.globus_era_token = config.globus_era_timestamp.to_s.freeze
    if Rails.env.test?
      config.globus_dir = '/tmp/deepbluedata-globus'
      Dir.mkdir config.globus_dir unless Dir.exist? config.globus_dir
    else
      config.globus_dir = Settings.globus_dir
    end
    # puts "globus_dir=#{config.globus_dir}"
    config.globus_dir = Pathname.new config.globus_dir
    config.globus_download_dir = config.globus_dir.join 'download'
    config.globus_prep_dir = config.globus_dir.join 'prep'
    if Rails.env.test?
      Dir.mkdir config.globus_download_dir unless Dir.exist? config.globus_download_dir
      Dir.mkdir config.globus_prep_dir unless Dir.exist? config.globus_prep_dir
    end
    # Globus support is enabled only when both working directories exist.
    config.globus_enabled = true && Dir.exist?( config.globus_download_dir ) && Dir.exist?( config.globus_prep_dir )
    config.base_file_name = "DataCORE_"
    config.globus_base_url = 'https://app.globus.org/file-manager?origin_id=99d8c648-a9ff-11e7-aedd-22000a92523b&origin_path=%2Fdownload%2F'
    config.globus_restart_all_copy_jobs_quiet = true
    config.globus_debug_delay_per_file_copy_job_seconds = 0
    config.globus_after_copy_job_ui_delay_seconds = 3
    if Rails.env.production?
      config.globus_copy_file_group = "dbdglobus"
    else
      config.globus_copy_file_group = nil
    end
    config.globus_copy_file_permissions = "u=rw,g=rw,o=r"

    # deposit notification email addresses
    config.notification_email = Settings.notification_email
    config.user_email = Settings.user_email

    # Upload limits: 2 GiB per file, 5x that (10 GiB) total per deposit.
    config.max_file_size = 2 * ( 1024 ** 3 )
    config.max_file_size_str = ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert(config.max_file_size, {})

    config.max_total_file_size = config.max_file_size * 5
    config.max_total_file_size_str = ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert(config.max_total_file_size, {})

    config.max_work_file_size_to_download = 10_000_000_000
    config.min_work_file_size_to_download_warn = 1_000_000_000

    ## configure jira integration
    config.jira_integration_hostnames = [ 'deepblue.local',
                                          'testing.deepblue.lib.umich.edu',
                                          'staging.deepblue.lib.umich.edu',
                                          'deepblue.lib.umich.edu' ].freeze
    config.jira_integration_hostnames_prod = [ 'deepblue.lib.umich.edu' ].freeze
    config.jira_integration_enabled = config.jira_integration_hostnames.include? config.hostname
    # Jira runs in test mode everywhere except the production host.
    config.jira_test_mode = !config.jira_integration_hostnames_prod.include?( config.hostname )
    config.jira_manager_project_key = 'DBHELP'
    config.jira_manager_issue_type = 'Data Deposit'
    # config.jira_manager_project_key = 'BLUEDOC'
    # config.jira_manager_issue_type = 'Story'

    ### file upload and ingest
    config.notify_user_file_upload_and_ingest_are_complete = true
    config.notify_managers_file_upload_and_ingest_are_complete = true

    # ingest characterization config
    config.characterize_excluded_ext_set = { '.csv' => 'text/plain' }.freeze # , '.nc' => 'text/plain' }.freeze
    config.characterize_enforced_mime_type = { '.csv' => 'text/csv' }.freeze # , '.nc' => 'text/plain' }.freeze

    # ingest derivative config
    config.derivative_excluded_ext_set = {}.freeze
    config.derivative_max_file_size = 4_000_000_000 # set to -1 for no limit
    config.derivative_max_file_size_str = ActiveSupport::NumberHelper::NumberToHumanSizeConverter.convert(config.derivative_max_file_size, precision: 3 )

    # URL for logging the user out of Cosign
    config.logout_prefix = "https://weblogin.umich.edu/cgi-bin/logout?"

    # See references to: DeepBlueDocs::Application.config.relative_url_root
    config.relative_url_root = Settings.relative_url_root unless Rails.env.test?

    # Set the default host for resolving _url methods
    Rails.application.routes.default_url_options[:host] = config.hostname

    # ingest virus scan config
    config.virus_scan_max_file_size = 4_000_000_000
    config.virus_scan_retry = true
    config.virus_scan_retry_on_error = false
    config.virus_scan_retry_on_service_unavailable = true
    config.virus_scan_retry_on_unknown = false

    config.do_ordered_list_hack = true
    config.do_ordered_list_hack_save = true

    config.email_enabled = true
    config.email_log_echo_to_rails_logger = true

    # provenance log settings (one log file per Rails environment)
    config.provenance_log_name = "provenance_#{Rails.env}.log"
    config.provenance_log_path = Rails.root.join( 'log', config.provenance_log_name )
    config.provenance_log_echo_to_rails_logger = true
    config.provenance_log_redundant_events = true

    config.scheduler_log_echo_to_rails_logger = true
    config.scheduler_job_file = 'scheduler_jobs_prod.yml'
    config.scheduler_heartbeat_email_targets = [ 'fritx@umich.edu' ] # leave empty to disable

    config.upload_log_echo_to_rails_logger = true

  end

end
# rubocop:enable Rails/Output
-
1
ENV['BUNDLE_GEMFILE'] ||= File.expand_path('../Gemfile', __dir__)
-
-
1
require 'bundler/setup' # Set up gems listed in the Gemfile.
-
# Load the Rails application.
-
1
require_relative 'application'
-
-
# load commit version for footer
-
1
ENV['SOURCE_COMMIT'] = `git rev-parse --short HEAD`.squish unless ENV['SOURCE_COMMIT'].present?
-
1
ENV['SOURCE_VERSION'] = `git describe --tags --abbrev=0`.squish unless ENV['SOURCE_VERSION'].present?
-
-
# Initialize the Rails application.
-
1
Rails.application.initialize!
-
# frozen_string_literal: true

require "email_logger"
require "provenance_logger"

# Test-environment configuration.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # The test environment is used exclusively to run your application's
  # test suite. You never need to work with it otherwise. Remember that
  # your test database is "scratch space" for the test suite and is wiped
  # and recreated between test runs. Don't rely on the data there!
  config.cache_classes = true

  # Do not eager load code on boot. This avoids loading your whole application
  # just for the purpose of running a single test. If you are using a tool that
  # preloads Rails for running tests, you may have to set it to true.
  config.eager_load = false

  # Configure public file server for tests with Cache-Control for performance.
  config.public_file_server.enabled = true
  config.public_file_server.headers = {
    'Cache-Control' => 'public, max-age=3600'
  }

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Raise exceptions instead of rendering exception templates.
  config.action_dispatch.show_exceptions = false

  # Disable request forgery protection in test environment.
  config.action_controller.allow_forgery_protection = false
  config.action_mailer.perform_caching = false

  # Tell Action Mailer not to deliver emails to the real world.
  # The :test delivery method accumulates sent emails in the
  # ActionMailer::Base.deliveries array.
  config.action_mailer.delivery_method = :test

  # Print deprecation notices to the stderr.
  config.active_support.deprecation = :stderr

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Deposit notifications in tests go to a throwaway address (overrides the
  # Settings-driven value from application.rb).
  config.notification_email = 'fake@sample.com'

end
-
1
# Feature-flag declarations for the Flipflop gem; the values below are only
# the defaults for each feature.
Flipflop.configure do

  feature :only_use_data_set_work_type,
          default: true,
          description: "Only give users ability to create Data Set Work Type"

  feature :limit_browse_options,
          default: true,
          description: "Limit the users browse options"

  feature :dir_upload,
          default: false,
          description: "Allow user to upload files for work from a directory."

end
-
# Be sure to restart your server when you modify this file.
-
-
# ApplicationController.renderer.defaults.merge!(
-
# http_host: 'example.org',
-
# https: false
-
# )
-
# Be sure to restart your server when you modify this file.
-
-
# Version of your assets, change this if you want to expire all your assets.
Rails.application.config.assets.version = '1.0'

# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path

# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# Rails.application.config.assets.precompile += %w( search.js )
# Be sure to restart your server when you modify this file.
-
-
# You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces.
-
# Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ }
-
-
# You can also remove all the silencers if you're trying to debug a problem that might stem from framework code.
-
# Rails.backtrace_cleaner.remove_silencers!
-
# frozen_string_literal: true

# Bulkrax bulk import/export configuration.
Bulkrax.setup do |config|
  # Add local parsers
  # config.parsers += [
  #   { name: 'MODS - My Local MODS parser', class_name: 'Bulkrax::ModsXmlParser', partial: 'mods_fields' },
  # ]

  # Field to use during import to identify if the Work or Collection already exists.
  # Default is 'source'.
  # config.system_identifier_field = 'source'

  # WorkType to use as the default if none is specified in the import
  # Default is the first returned by Hyrax.config.curation_concerns
  config.default_work_type = 'DataSet'

  # Path to store pending imports
  # config.import_path = 'tmp/imports'

  # Path to store exports before download
  # config.export_path = 'tmp/exports'

  # Server name for oai request header
  # config.server_name = 'my_server@name.com'

  # Per-entry source_identifier, parent-child and collection field mappings
  # are not customized here; the Bulkrax defaults apply (see the Bulkrax
  # README for the available options, e.g.):
  # config.parent_child_field_mapping = { }
  # config.collection_field_mapping['Bulkrax::RdfEntry'] = 'http://opaquenamespace.org/ns/set'

  # Field mappings
  # Replace the whole mapping set: CSV column headers (e.g. "DSpace handle")
  # are mapped onto work property names for CSV imports.
  config.field_mappings = {
    "Bulkrax::CsvParser" => {
      "source" => { from: ["DSpace handle"] },
      "title" => { from: ["Title"] },
      "creator" => { from: ["Creator"] },
      "authoremail" => { from: ["Contact Information"] },
      "description_abstract" => { from: ["Abstract"] },
      "methodology" => { from: ["Methodology"] },
      "description" => { from: ["Description"] },
      "date_coverage" => { from: ["Date Coverage"] },
      "rights_license" => { from: ["License"] },
      "discipline" => { from: ["Discipline"] },
      "fundedby" => { from: ["Funding Agency"] },
      "keyword" => { from: ["Keyword"] },
      "language" => { from: ["Language"] },
      "referenced_by" => { from: ["Citation to related material"] },
      "embargo_reason" => { from: ["Embargo"] },
      "doi" => { from: ["DOI"] },
      "resource_type" => { from: ["Type"] },
      "publisher" => { from: ["Publisher"] },
      "geo_location_box" => { from: ["geoLocationBox"] },
      "geo_location_place" => { from: ["geoLocationPlace"] },
      "curation_notes_user" => { from: ["Curation Note"] }
    }
  }

  # Add to, or change existing mappings as follows
  # e.g. to exclude date
  # config.field_mappings["Bulkrax::OaiDcParser"]["date"] = { from: ["date"], excluded: true }

  # To duplicate a set of mappings from one parser to another
  # config.field_mappings["Bulkrax::OaiOmekaParser"] = {}
  # config.field_mappings["Bulkrax::OaiDcParser"].each {|key,value| config.field_mappings["Bulkrax::OaiOmekaParser"][key] = value }

  # Properties that should not be used in imports/exports. They are reserved for use by Hyrax.
  # config.reserved_properties += ['my_field']
end
-
# frozen_string_literal: true

# ClamAV.instance.loaddb if defined? ClamAV

# Select the system-wide virus scanner for Hydra::Works.
#
# BUG FIX: the original condition was written as
#   if defined? ClamAV && ENV['CI'] != 'true'
# which Ruby parses as defined?(ClamAV && ENV['CI'] != 'true') because
# `defined?` binds more loosely than `&&`. `defined?` applied to a compound
# expression returns the truthy string "expression" without evaluating it,
# so the ClamAV branch was ALWAYS taken — even with ClamAV absent or in CI
# (the coverage report shows the else branch as never executed).
# Parenthesizing `defined?(ClamAV)` restores the intended meaning.
if defined?(ClamAV) && ENV['CI'] != 'true'
  require "umich_clamav_daemon_scanner"
  Hydra::Works.default_system_virus_scanner = UMichClamAVDaemonScanner
  Rails.logger.info "Using ClamAV Daemon virus checker."
else
  # No ClamAV available (or running in CI): install a no-op scanner so
  # ingest proceeds without virus checking.
  require "null_virus_scanner"
  Hydra::Works.default_system_virus_scanner = NullVirusScanner
  Rails.logger.warn "No virus checker in use."
end
-
# Initializer for the Config gem.
Config.setup do |config|
  # Name of the constant exposing loaded settings
  config.const_name = 'Settings'
  # Use ENV settings
  # Environment variables of the form DATACORE__SECTION__KEY override the
  # corresponding settings-file entries (keys downcased, values parsed into
  # native types).
  config.use_env = true
  config.env_prefix = 'DATACORE'
  config.env_separator = '__'
  config.env_converter = :downcase
  config.env_parse_values = true
end
-
# Be sure to restart your server when you modify this file.
-
-
# Specify a serializer for the signed and encrypted cookie jars.
-
# Valid options are :json, :marshal, and :hybrid.
-
1
Rails.application.config.action_dispatch.cookies_serializer = :json
-
# frozen_string_literal: true

# Local Devise extensions for HTTP-header (cosign-style) authentication.
require 'devise/behaviors/http_header_authenticatable_behavior'
require 'devise/strategies/http_header_authenticatable'
require 'devise/models/http_header_authenticatable'

# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
# (Unset options keep the Devise defaults; see the Devise initializer
# template for the full catalogue of commented-out options.)
Devise.setup do |config|
  # Devise uses `secret_key_base` as its `secret_key` by default; override
  # with config.secret_key only if a dedicated key is needed.

  # ==> Mailer Configuration
  # "from" address shown by Devise::Mailer.
  config.mailer_sender = 'please-change-me-at-config-initializers-devise@example.com'

  # ==> ORM configuration
  require 'devise/orm/active_record'

  # ==> Configuration for any authentication mechanism
  # Email is downcased and whitespace-stripped before lookup/authentication.
  config.case_insensitive_keys = [:email]
  config.strip_whitespace_keys = [:email]

  # Do not store users authenticated via HTTP auth in the session.
  config.skip_session_storage = [:http_auth]

  # ==> Configuration for :database_authenticatable
  # bcrypt cost: a single stretch in tests for speed, 11 elsewhere.
  config.stretches = Rails.env.test? ? 1 : 11

  # ==> Configuration for :confirmable
  # Email address changes must be re-confirmed before taking effect; the new
  # address is held in unconfirmed_email until confirmation.
  config.reconfirmable = true

  # ==> Configuration for :rememberable
  # Invalidates all the remember me tokens when the user signs out.
  config.expire_all_remember_me_on_sign_out = true

  # ==> Configuration for :validatable
  # Range for password length.
  config.password_length = 6..128

  # Email regex used to validate email formats. It simply asserts that
  # one (and only one) @ exists in the given string. This is mainly
  # to give user feedback and not to assert the e-mail validity.
  config.email_regexp = /\A[^@\s]+@[^@\s]+\z/

  # ==> Configuration for :recoverable
  # Time interval during which a reset-password token remains usable.
  config.reset_password_within = 6.hours

  # ==> Navigation configuration
  # The default HTTP method used to sign out a resource. Default is :delete.
  config.sign_out_via = :get

  # ==> OmniAuth
  # IU AUTH ONLY
  # CAS single sign-on; endpoints come from Settings.omniauth.
  config.omniauth :cas,
                  :host => Settings.omniauth.host,
                  :login_url => Settings.omniauth.login_url,
                  :service_validate_url => Settings.omniauth.service_validate_url,
                  :logout_url => Settings.omniauth.logout_url,
                  :ssl => true
end
-
-
# This doesn't seem to do anything
-
1
[ :after_create_concern,
-
:after_create_fileset,
-
:after_update_content,
-
:after_revert_content,
-
:after_update_metadata,
-
:after_import_local_file_success,
-
:after_import_local_file_failure,
-
:after_fixity_check_failure,
-
:after_destroy,
-
:after_import_url_success,
-
:after_import_url_failure
-
].each do |event_name|
-
-
11
Deepblue::LoggingService.new event_name: event_name
-
-
end
-
# frozen_string_literal: true
-
-
1
Ezid::Client.configure do |config|
-
1
config.host = Settings.ezid.host
-
1
config.port = Settings.ezid.port
-
1
config.user = Settings.ezid.user
-
1
config.password = Settings.ezid.password
-
1
config.timeout = Settings.ezid.timeout
-
1
config.default_shoulder = Settings.ezid.shoulder
-
end
-
# Be sure to restart your server when you modify this file.
-
-
# Configure sensitive parameters which will be filtered from the log file.
-
1
Rails.application.config.filter_parameters += [:password]
-
# windows doesn't properly require hydra-head (from the gemfile), so we need to require it explicitly here:
-
1
require 'hydra/head' unless defined? Hydra
-
-
1
Hydra.configure do |config|
-
# This specifies the solr field names of permissions-related fields.
-
# You only need to change these values if you've indexed permissions by some means other than the Hydra's built-in tooling.
-
# If you change these, you must also update the permissions request handler in your solrconfig.xml to return those values
-
#
-
# config.permissions.discover.group = ActiveFedora::SolrQueryBuilder.solr_name("discover_access_group", :symbol)
-
# config.permissions.discover.individual = ActiveFedora::SolrQueryBuilder.solr_name("discover_access_person", :symbol)
-
# config.permissions.read.group = ActiveFedora::SolrQueryBuilder.solr_name("read_access_group", :symbol)
-
# config.permissions.read.individual = ActiveFedora::SolrQueryBuilder.solr_name("read_access_person", :symbol)
-
# config.permissions.edit.group = ActiveFedora::SolrQueryBuilder.solr_name("edit_access_group", :symbol)
-
# config.permissions.edit.individual = ActiveFedora::SolrQueryBuilder.solr_name("edit_access_person", :symbol)
-
#
-
# config.permissions.embargo.release_date = ActiveFedora::SolrQueryBuilder.solr_name("embargo_release_date", :stored_sortable, type: :date)
-
# config.permissions.lease.expiration_date = ActiveFedora::SolrQueryBuilder.solr_name("lease_expiration_date", :stored_sortable, type: :date)
-
#
-
#
-
# Specify the user model
-
# config.user_model = 'User'
-
-
1
config.user_key_field = Devise.authentication_keys.first
-
end
-
# frozen_string_literal: true
-
-
1
Hyrax.config do |config|
-
-
1
config.register_curation_concern :data_set
-
1
config.register_curation_concern :dissertation
-
# config.register_curation_concern :generic_work
-
-
# Register roles that are expected by your implementation.
-
# @see Hyrax::RoleRegistry for additional details.
-
# @note there are magical roles as defined in Hyrax::RoleRegistry::MAGIC_ROLES
-
# config.register_roles do |registry|
-
# registry.add(name: 'captaining', description: 'For those that really like the front lines')
-
# end
-
-
# When an admin set is created, we need to activate a workflow.
-
# The :default_active_workflow_name is the name of the workflow we will activate.
-
# @see Hyrax::Configuration for additional details and defaults.
-
# config.default_active_workflow_name = 'default'
-
-
# Which RDF term should be used to relate objects to an admin set?
-
# If this is a new repository, you may want to set a custom predicate term here to
-
# avoid clashes if you plan to use the default (dct:isPartOf) for other relations.
-
# config.admin_set_predicate = ::RDF::DC.isPartOf
-
-
# Which RDF term should be used to relate objects to a rendering?
-
# If this is a new repository, you may want to set a custom predicate term here to
-
# avoid clashes if you plan to use the default (dct:hasFormat) for other relations.
-
# config.rendering_predicate = ::RDF::DC.hasFormat
-
-
# Email recipient of messages sent via the contact form
-
# config.contact_email = "repo-admin@example.org"
-
1
config.contact_email = Settings.hyrax.contact_email
-
-
# Text prefacing the subject entered in the contact form
-
# config.subject_prefix = "Contact form:"
-
-
# How many notifications should be displayed on the dashboard
-
# config.max_notifications_for_dashboard = 5
-
-
# How frequently should a file be fixity checked
-
# config.max_days_between_fixity_checks = 7
-
-
# Options to control the file uploader
-
# Run `bundle exec rake tmp:cache:clear` to clear all the contents of public/assets
-
# Run `bundle exec rake assets:clobber assets:precompile` in dev for this to take effect
-
1
config.uploader = {
-
limitConcurrentUploads: 6,
-
maxNumberOfFiles: 100,
-
# maxFileSize: 500.megabytes
-
maxFileSize: 2.gigabytes,
-
url: '/uploads'
-
}
-
-
# Enable displaying usage statistics in the UI
-
# Defaults to false
-
# Requires a Google Analytics id and OAuth2 keyfile. See README for more info
-
1
config.analytics = false
-
-
# Google Analytics tracking ID to gather usage statistics
-
# config.google_analytics_id = Rails.application.secrets.analytics_id
-
1
path = "#{Rails.application.config.paths["config"].existent.first}/analytics_id.yml"
-
1
if File.exist? path
-
Settings.analytics_id ||= Rails.application.config_for(:analytics_id)
-
config.google_analytics_id = Settings.analytics_id['analytics_id']
-
else
-
1
config.google_analytics_id = nil
-
end
-
-
# Date you wish to start collecting Google Analytic statistics for
-
# Leaving it blank will set the start date to when ever the file was uploaded by
-
# NOTE: if you have always sent analytics to GA for downloads and page views leave this commented out
-
# This is the date on the UMRDR WorkViewStat table record number 1
-
1
config.analytic_start_date = DateTime.new(2016, 4, 10 )
-
-
# Enables a link to the citations page for a work
-
# Default is false
-
1
config.citations = true
-
-
# Where to store tempfiles, leave blank for the system temp directory (e.g. /tmp)
-
# config.temp_file_base = '/home/developer1'
-
1
config.temp_file_base = File.join( Rails.root, 'tmp', 'derivatives') # rubocop:disable Rails/FilePath
-
-
# Hostpath to be used in Endnote exports
-
# config.persistent_hostpath = 'http://localhost/files/'
-
-
# If you have ffmpeg installed and want to transcode audio and video set to true
-
# config.enable_ffmpeg = false
-
-
# Hyrax uses NOIDs for files and collections instead of Fedora UUIDs
-
# where NOID = 10-character string and UUID = 32-character string w/ hyphens
-
# config.enable_noids = true
-
-
# Template for your repository's NOID IDs
-
# config.noid_template = ".reeddeeddk"
-
-
# Use the database-backed minter class
-
# config.noid_minter_class = Noid::Rails::Minter::Db
-
-
# Store identifier minter's state in a file for later replayability
-
# config.minter_statefile = '/tmp/minter-state'
-
1
config.minter_statefile = Settings.hyrax.minter_statefile
-
-
# Prefix for Redis keys
-
# config.redis_namespace = "hyrax"
-
1
config.redis_namespace = Settings.hyrax.redis_namespace
-
-
# Path to the file characterization tool
-
# config.fits_path = "fits.sh"
-
1
config.fits_path = system("which", "fits.sh") ? "fits.sh" : "/l/local/fits/fits.sh"
-
-
# Path to the file derivatives creation tool
-
# config.libreoffice_path = "soffice"
-
-
# Option to enable/disable full text extraction from PDFs
-
# Default is true, set to false to disable full text extraction
-
# config.extract_full_text = true
-
-
# How many seconds back from the current time that we should show by default of the user's activity on the user's dashboard
-
# config.activity_to_show_default_seconds_since_now = 24*60*60
-
-
# Hyrax can integrate with Zotero's Arkivo service for automatic deposit
-
# of Zotero-managed research items.
-
# config.arkivo_api = false
-
-
# Stream realtime notifications to users in the browser
-
# config.realtime_notifications = true
-
-
# Location autocomplete uses geonames to search for named regions
-
# Username for connecting to geonames
-
1
config.geonames_username = ''
-
-
# Should the acceptance of the licence agreement be active (checkbox), or
-
# implied when the save button is pressed? Set to true for active
-
# The default is true.
-
# config.active_deposit_agreement_acceptance = true
-
-
# Should work creation require file upload, or can a work be created first
-
# and a file added at a later time?
-
# The default is true.
-
1
config.work_requires_files = false
-
-
# Enable IIIF image service. This is required to use the
-
# UniversalViewer-ified show page
-
#
-
# If you have run the riiif generator, an embedded riiif service
-
# will be used to deliver images via IIIF. If you have not, you will
-
# need to configure the following other configuration values to work
-
# with your image server:
-
#
-
# * iiif_image_url_builder
-
# * iiif_info_url_builder
-
# * iiif_image_compliance_level_uri
-
# * iiif_image_size_default
-
#
-
# Default is false
-
# config.iiif_image_server = false
-
-
# Returns a URL that resolves to an image provided by a IIIF image server
-
1
config.iiif_image_url_builder = lambda do |file_id, base_url, size|
-
Riiif::Engine.routes.url_helpers.image_url(file_id, host: base_url, size: size)
-
end
-
# config.iiif_image_url_builder = lambda do |file_id, base_url, size|
-
# "#{base_url}/downloads/#{file_id.split('/').first}"
-
# end
-
-
# Returns a URL that resolves to an info.json file provided by a IIIF image server
-
1
config.iiif_info_url_builder = lambda do |file_id, base_url|
-
uri = Riiif::Engine.routes.url_helpers.info_url(file_id, host: base_url)
-
uri.sub(%r{/info\.json\Z}, '')
-
end
-
# config.iiif_info_url_builder = lambda do |_, _|
-
# ""
-
# end
-
-
# Returns a URL that indicates your IIIF image server compliance level
-
# config.iiif_image_compliance_level_uri = 'http://iiif.io/api/image/2/level2.json'
-
-
# Returns a IIIF image size default
-
# config.iiif_image_size_default = '600,'
-
-
# Fields to display in the IIIF metadata section; default is the required fields
-
# config.iiif_metadata_fields = Hyrax::Forms::WorkForm.required_fields
-
-
# Should a button with "Share my work" show on the front page to all users (even those not logged in)?
-
# config.display_share_button_when_not_logged_in = true
-
-
# The user who runs batch jobs. Update this if you aren't using emails
-
# config.batch_user_key = 'batchuser@example.com'
-
-
# The user who runs fixity check jobs. Update this if you aren't using emails
-
# config.audit_user_key = 'audituser@example.com'
-
#
-
# The banner image. Should be 5000px wide by 1000px tall
-
# config.banner_image = 'https://cloud.githubusercontent.com/assets/92044/18370978/88ecac20-75f6-11e6-8399-6536640ef695.jpg'
-
-
# Temporary paths to hold uploads before they are ingested into FCrepo
-
# These must be lambdas that return a Pathname. Can be configured separately
-
# config.upload_path = ->() { Rails.root + 'tmp' + 'uploads' }
-
# config.cache_path = ->() { Rails.root + 'tmp' + 'uploads' + 'cache' }
-
-
# Location on local file system where derivatives will be stored
-
# If you use a multi-server architecture, this MUST be a shared volume
-
# config.derivatives_path = Rails.root.join('tmp', 'derivatives')
-
-
# Should schema.org microdata be displayed?
-
# config.display_microdata = true
-
-
# What default microdata type should be used if a more appropriate
-
# type can not be found in the locale file?
-
# config.microdata_default_type = 'http://schema.org/CreativeWork'
-
-
# Location on local file system where uploaded files will be staged
-
# prior to being ingested into the repository or having derivatives generated.
-
# If you use a multi-server architecture, this MUST be a shared volume.
-
# config.working_path = Rails.root.join( 'tmp', 'uploads')
-
-
# Should the media display partial render a download link?
-
# config.display_media_download_link = true
-
-
# A configuration point for changing the behavior of the license service
-
# @see Hyrax::LicenseService for implementation details
-
# config.license_service_class = Hyrax::LicenseService
-
-
# Labels for display of permission levels
-
# config.permission_levels = { "View/Download" => "read", "Edit access" => "edit" }
-
-
# Labels for permission level options used in dropdown menus
-
# config.permission_options = { "Choose Access" => "none", "View/Download" => "read", "Edit" => "edit" }
-
-
# Labels for owner permission levels
-
# config.owner_permission_levels = { "Edit Access" => "edit" }
-
-
# Path to the ffmpeg tool
-
# config.ffmpeg_path = 'ffmpeg'
-
-
# Max length of FITS messages to display in UI
-
# config.fits_message_length = 5
-
-
# ActiveJob queue to handle ingest-like jobs
-
# config.ingest_queue_name = :default
-
1
config.ingest_queue_name = :ingest
-
-
## Attributes for the lock manager which ensures a single process/thread is mutating a ore:Aggregation at once.
-
# How many times to retry to acquire the lock before raising UnableToAcquireLockError
-
# config.lock_retry_count = 600 # Up to 2 minutes of trying at intervals up to 200ms
-
#
-
# Maximum wait time in milliseconds before retrying. Wait time is a random value between 0 and retry_delay.
-
# config.lock_retry_delay = 200
-
#
-
# How long to hold the lock in milliseconds
-
# config.lock_time_to_live = 60_000
-
-
## Do not alter unless you understand how ActiveFedora handles URI/ID translation
-
# config.translate_id_to_uri = lambda do |uri|
-
# baseparts = 2 + [(Noid::Rails::Config.template.gsub(/\.[rsz]/, '').length.to_f / 2).ceil, 4].min
-
# uri.to_s.sub(baseurl, '').split('/', baseparts).last
-
# end
-
# config.translate_uri_to_id = lambda do |id|
-
# "#{ActiveFedora.fedora.host}#{ActiveFedora.fedora.base_path}/#{Noid::Rails.treeify(id)}"
-
# end
-
-
## Fedora import/export tool
-
#
-
# Path to the Fedora import export tool jar file
-
# config.import_export_jar_file_path = "tmp/fcrepo-import-export.jar"
-
#
-
# Location where BagIt files should be exported
-
# config.bagit_dir = "tmp/descriptions"
-
-
# This enables or disables the ability to download files.
-
1
config.define_singleton_method(:download_files) do
-
return true
-
end
-
-
# If browse-everything has been configured, load the configs. Otherwise, set to nil.
-
begin
-
1
if defined? BrowseEverything
-
1
config.browse_everything = BrowseEverything.config
-
else
-
Rails.logger.warn "BrowseEverything is not installed"
-
end
-
rescue Errno::ENOENT
-
config.browse_everything = nil
-
end
-
-
## Whitelist all directories which can be used to ingest from the local file
-
# system.
-
#
-
# Any file, and only those, that is anywhere under one of the specified
-
# directories can be used by CreateWithRemoteFilesActor to add local files
-
# to works. Files uploaded by the user are handled separately and the
-
# temporary directory for those need not be included here.
-
#
-
# Default value includes BrowseEverything.config['file_system'][:home] if it
-
# is set, otherwise default is an empty list. You should only need to change
-
# this if you have custom ingestions using CreateWithRemoteFilesActor to
-
# ingest files from the file system that are not part of the BrowseEverything
-
# mount point.
-
#
-
# config.whitelisted_ingest_dirs = []
-
-
# rubocop#:disable Rails/Output
-
# Doing this before adding actors freezes the stack
-
# # print out the actor stack
-
# actor = Hyrax::CurationConcern.actor
-
# puts "Hyrax::CurationConcern.actor stack"
-
# loop do
-
# puts "actor: #{actor.class.name}"
-
# break if actor.nil?
-
# break unless actor.respond_to? :next_actor
-
# actor = actor.next_actor
-
# end
-
-
# see Hyrax::DefaultMiddlewareStack.build_stack
-
1
Hyrax::CurationConcern.actor_factory.insert_after Hyrax::Actors::OptimisticLockValidator, Hyrax::Actors::AfterOptimisticLockValidator
-
1
Hyrax::CurationConcern.actor_factory.insert_after Hyrax::Actors::CollectionsMembershipActor, Hyrax::Actors::BeforeAddToWorkActor
-
1
Hyrax::CurationConcern.actor_factory.insert_after Hyrax::Actors::AddToWorkActor, Hyrax::Actors::BeforeAttachMembersActor
-
1
Hyrax::CurationConcern.actor_factory.insert_after Hyrax::Actors::FeaturedWorkActor, Hyrax::Actors::BeforeModelActor
-
-
# turn this on to see verify the stack
-
# actor = Hyrax::CurationConcern.actor
-
# puts "Hyrax::CurationConcern.actor stack after inserts"
-
# loop do
-
# puts "#{actor.class.name}"
-
# break if actor.nil?
-
# break unless actor.respond_to? :next_actor
-
# actor = actor.next_actor
-
# end
-
# rubocop#:enable Rails/Output
-
-
# override the path used for branding
-
# the rest of the code assumes that the this path is symlinked to public/branding
-
1
config.instance_variable_set( :@branding_path, Rails.root.join( Settings.relative_url_root, 'branding' ) )
-
-
end
-
-
1
Date::DATE_FORMATS[:standard] = "%m/%d/%Y"
-
-
1
Qa::Authorities::Local.register_subauthority('subjects', 'Qa::Authorities::Local::TableBasedAuthority')
-
1
Qa::Authorities::Local.register_subauthority('languages', 'Qa::Authorities::Local::TableBasedAuthority')
-
1
Qa::Authorities::Local.register_subauthority('genres', 'Qa::Authorities::Local::TableBasedAuthority')
-
-
# set bulkrax default work type to first curation_concern if it isn't already set
-
1
if Bulkrax.default_work_type.blank?
-
Bulkrax.default_work_type = Hyrax.config.curation_concerns.first.to_s
-
end
-
# # These events are triggered by actions within Hyrax Actors
-
# Hyrax.config.callback.set(:after_create_concern) do |curation_concern, user|
-
# ContentDepositEventJob.perform_later(curation_concern, user)
-
# Deepblue::EventHelper.after_create_concern_callback( curation_concern: curation_concern, user: user )
-
# end
-
-
# Hyrax.config.callback.set(:after_create_fileset) do |file_set, user|
-
# FileSetAttachedEventJob.perform_later(file_set, user)
-
# Deepblue::EventHelper.after_create_fileset_callback( file_set: file_set, user: user )
-
# end
-
-
# Hyrax.config.callback.set(:after_revert_content) do |file_set, user, revision|
-
# ContentRestoredVersionEventJob.perform_later(file_set, user, revision)
-
# Deepblue::EventHelper.after_revert_content_callback( file_set: file_set, user: user )
-
# end
-
-
# # :after_update_content callback replaced by after_perform block in IngestJob
-
-
# Hyrax.config.callback.set(:after_update_metadata) do |curation_concern, user|
-
# ContentUpdateEventJob.perform_later(curation_concern, user)
-
# Deepblue::EventHelper.after_update_metadata_callback( curation_concern: curation_concern, user: user )
-
# end
-
-
# Hyrax.config.callback.set(:after_destroy) do |id, user|
-
# ContentDeleteEventJob.perform_later(id, user)
-
# Deepblue::EventHelper.after_destroy_callback( id: id, user: user )
-
# end
-
-
# Hyrax.config.callback.set(:after_fixity_check_failure) do |file_set, checksum_audit_log:|
-
# Hyrax::FixityCheckFailureService.new(file_set, checksum_audit_log: checksum_audit_log).call
-
# Deepblue::EventHelper.after_fixity_check_failure_callback( file_set: file_set, checksum_audit_log: checksum_audit_log )
-
# end
-
-
# Hyrax.config.callback.set(:after_batch_create_success) do |user|
-
# Hyrax::BatchCreateSuccessService.new(user).call
-
# Deepblue::EventHelper.after_batch_create_succes_callback( curation_concern: curation_concern, user: user )
-
# end
-
-
# Hyrax.config.callback.set(:after_batch_create_failure) do |user, messages|
-
# Hyrax::BatchCreateFailureService.new(user, messages).call
-
# Deepblue::EventHelper.after_batch_create_failure_callback( user: user, msg: messages )
-
# end
-
-
# Hyrax.config.callback.set(:after_import_url_success) do |file_set, user|
-
# # ImportUrlSuccessService was removed here since it's duplicative of
-
# # the :after_create_fileset notification
-
# end
-
-
# Hyrax.config.callback.set(:after_import_url_failure) do |file_set, user|
-
# Hyrax::ImportUrlFailureService.new(file_set, user).call
-
# Deepblue::EventHelper.after_import_url_failure_callback( file_set: file_set, user: user )
-
# end
-
# Be sure to restart your server when you modify this file.
-
-
# Add new inflection rules using the following format. Inflections
-
# are locale specific, and you may define rules for as many different
-
# locales as you wish. All of these examples are active by default:
-
# ActiveSupport::Inflector.inflections(:en) do |inflect|
-
# inflect.plural /^(ox)$/i, '\1en'
-
# inflect.singular /^(ox)en/i, '\1'
-
# inflect.irregular 'person', 'people'
-
# inflect.uncountable %w( fish sheep )
-
# end
-
-
# These inflection rules are supported but not enabled by default:
-
# ActiveSupport::Inflector.inflections(:en) do |inflect|
-
# inflect.acronym 'RESTful'
-
# end
-
1
LDAPGroupsLookup.config = {
-
enabled: Settings.ldap[:enabled],
-
config: { host: Settings.ldap[:host],
-
port: Settings.ldap[:port] || 636,
-
encryption: {
-
method: :simple_tls,
-
tls_options: OpenSSL::SSL::SSLContext::DEFAULT_PARAMS,
-
},
-
auth: {
-
method: :simple,
-
username: "cn=#{Settings.ldap[:user]}",
-
password: Settings.ldap[:pass],
-
}
-
},
-
tree: Settings.ldap[:tree],
-
account_ou: Settings.ldap[:account_ou],
-
group_ou: Settings.ldap[:group_ou]
-
}
-
1
Mailboxer.setup do |config|
-
-
#Configures if your application uses or not email sending for Notifications and Messages
-
1
config.uses_emails = true
-
-
#Configures the default from for emails sent for Messages and Notifications
-
1
config.default_from = "no-reply@mailboxer.com"
-
-
#Configures the methods needed by mailboxer
-
1
config.email_method = :mailboxer_email
-
1
config.name_method = :name
-
1
config.notify_method = :notify
-
-
#Configures if you use or not a search engine and which one you are using
-
#Supported engines: [:solr,:sphinx,:pg_search]
-
1
config.search_enabled = false
-
1
config.search_engine = :solr
-
-
#Configures maximum length of the message subject and body
-
1
config.subject_max_length = 255
-
1
config.body_max_length = 32000
-
end
-
# Be sure to restart your server when you modify this file.
-
-
# Add new mime types for use in respond_to blocks:
-
# Mime::Type.register "text/richtext", :rtf
-
1
Mime::Type.register "application/n-triples", :nt
-
1
Mime::Type.register "application/ld+json", :jsonld
-
1
Mime::Type.register "text/turtle", :ttl
-
1
Mime::Type.register 'application/x-endnote-refer', :endnote
-
1
require 'mini_magick'
-
-
1
MiniMagick.configure do |config|
-
1
config.shell_api = "posix-spawn"
-
end
-
# Be sure to restart your server when you modify this file.
-
#
-
# This file contains migration options to ease your Rails 5.0 upgrade.
-
#
-
# Read the Guide for Upgrading Ruby on Rails for more info on each option.
-
-
# Enable per-form CSRF tokens. Previous versions had false.
-
1
Rails.application.config.action_controller.per_form_csrf_tokens = true
-
-
# Enable origin-checking CSRF mitigation. Previous versions had false.
-
1
Rails.application.config.action_controller.forgery_protection_origin_check = true
-
-
# Make Ruby 2.4 preserve the timezone of the receiver when calling `to_time`.
-
# Previous versions had false.
-
1
ActiveSupport.to_time_preserves_timezone = true
-
-
# Require `belongs_to` associations by default. Previous versions had false.
-
1
Rails.application.config.active_record.belongs_to_required_by_default = true
-
-
# Do not halt callback chains when a callback returns false. Previous versions had true.
-
# ActiveSupport.halt_callback_chains_on_return_false = false
-
-
# Configure SSL options to enable HSTS with subdomains. Previous versions had false.
-
1
Rails.application.config.ssl_options = { hsts: { subdomains: true } }
-
# frozen_string_literal: true
-
-
1
OkComputer.mount_at = 'status'
-
-
1
OkComputer.logger = Logger.new('log/status.log')
-
# frozen_string_literal: true
-
-
# Be sure to restart your server when you modify this file.
-
-
1
DeepBlueDocs::Application.config.after_initialize do
-
# Rails.logger.info "Initializing provenance logging..."
-
# STDOUT.puts "Initializing provenance logging..."
-
# require 'provenance_persistence'
-
# ActiveFedora::Persistence.prepend( ::Deepblue::ProvenancePersistenceExt )
-
end
-
1
Rack::Attack.enabled = Settings.dig(:rack_attack, :enabled) || false
-
-
1
Rack::Attack.safelist('Safe') do |req|
-
Datacore::RackAttackConfig.safe_req?(req)
-
end
-
-
1
Rack::Attack.blocklist('Blocked') do |req|
-
Datacore::RackAttackConfig.block_req?(req)
-
end
-
-
1
Rack::Attack.throttle('Throttled',
-
limit: Settings.dig(:rack_attack, :throttle_limit) || 100,
-
period: Settings.dig(:rack_attack, :throttle_period) || 2.minutes) do |req|
-
Datacore::RackAttackConfig.throttle_req?(req)
-
end
-
1
require 'redis'
-
1
Redis.current = Redis.new(Settings.redis.to_h)
-
1
require 'resque'
-
-
-
# We make another client here with the same options as Redis.current, though
-
# we may be able to use it directly.
-
1
Resque.redis = Redis.new(Settings.redis.to_h)
-
1
Resque.redis.namespace = Settings.hyrax.redis_namespace
-
1
Resque.inline = Rails.env.test?
-
1
Riiif::Image.file_resolver = Riiif::HTTPFileResolver.new
-
1
Riiif::Image.info_service = lambda do |id, _file|
-
# id will look like a path to a pcdm:file
-
# (e.g. rv042t299%2Ffiles%2F6d71677a-4f80-42f1-ae58-ed1063fd79c7)
-
# but we just want the id for the FileSet it's attached to.
-
-
# Capture everything before the first slash
-
fs_id = id.sub(/\A([^\/]*)\/.*/, '\1')
-
resp = ActiveFedora::SolrService.get("id:#{fs_id}")
-
doc = resp['response']['docs'].first
-
raise "Unable to find solr document with id:#{fs_id}" unless doc
-
{ height: doc['height_is'], width: doc['width_is'] }
-
end
-
-
1
Riiif::Image.file_resolver.id_to_uri = lambda do |id|
-
ActiveFedora::Base.id_to_uri(CGI.unescape(id)).tap do |url|
-
Rails.logger.info "Riiif resolved #{id} to #{url}"
-
end
-
end
-
-
1
Riiif::Image.authorization_service = Hyrax::IIIFAuthorizationService
-
-
1
Riiif.not_found_image = Rails.root.join('app', 'assets', 'images', 'us_404.svg')
-
1
Riiif.unauthorized_image = Rails.root.join('app', 'assets', 'images', 'us_404.svg')
-
-
1
Riiif::Engine.config.cache_duration_in_days = 365
-
# Be sure to restart your server when you modify this file.
-
-
1
Rails.application.config.session_store :cookie_store, key: '_deep_blue_docs_session'
-
# NOTE: This is a modified version of simple_form's default config file.
-
# The only changes were to move the input to after the hint and error.
-
-
# Use this setup block to configure all options available in SimpleForm.
-
1
SimpleForm.setup do |config|
-
# Wrappers are used by the form builder to generate a
-
# complete input. You can remove any component from the
-
# wrapper, change the order or even add your own to the
-
# stack. The options given below are used to wrap the
-
# whole input.
-
1
config.wrappers :default, class: :input,
-
hint_class: :field_with_hint,
-
error_class: :field_with_errors do |b|
-
## Extensions enabled by default
-
# Any of these extensions can be disabled for a
-
# given input by passing: `f.input EXTENSION_NAME => false`.
-
# You can make any of these extensions optional by
-
# renaming `b.use` to `b.optional`.
-
-
# Determines whether to use HTML5 (:email, :url, ...)
-
# and required attributes
-
1
b.use :html5
-
-
# Calculates placeholders automatically from I18n
-
# You can also pass a string as f.input placeholder: "Placeholder"
-
1
b.use :placeholder
-
-
## Optional extensions
-
# They are disabled unless you pass `f.input EXTENSION_NAME => true`
-
# to the input. If so, they will retrieve the values from the model
-
# if any exists. If you want to enable any of those
-
# extensions by default, you can change `b.optional` to `b.use`.
-
-
# Calculates maxlength from length validations for string inputs
-
1
b.optional :maxlength
-
-
# Calculates pattern from format validations for string inputs
-
1
b.optional :pattern
-
-
# Calculates min and max from length validations for numeric inputs
-
1
b.optional :min_max
-
-
# Calculates readonly automatically from readonly attributes
-
1
b.optional :readonly
-
-
## Inputs
-
1
b.use :label
-
1
b.use :hint, wrap_with: { tag: :span, class: :hint }
-
1
b.use :error, wrap_with: { tag: :span, class: :error }
-
1
b.use :input
-
-
## full_messages_for
-
# If you want to display the full error message for the attribute, you can
-
# use the component :full_error, like:
-
#
-
# b.use :full_error, wrap_with: { tag: :span, class: :error }
-
end
-
-
# The default wrapper to be used by the FormBuilder.
-
1
config.default_wrapper = :default
-
-
# Define the way to render check boxes / radio buttons with labels.
-
# Defaults to :nested for bootstrap config.
-
# inline: input + label
-
# nested: label > input
-
1
config.boolean_style = :nested
-
-
# Default class for buttons
-
1
config.button_class = 'btn'
-
-
# Method used to tidy up errors. Specify any Rails Array method.
-
# :first lists the first message for each field.
-
# Use :to_sentence to list all errors for each field.
-
# config.error_method = :first
-
-
# Default tag used for error notification helper.
-
1
config.error_notification_tag = :div
-
-
# CSS class to add for error notification helper.
-
1
config.error_notification_class = 'error_notification'
-
-
# ID to add for error notification helper.
-
# config.error_notification_id = nil
-
-
# Series of attempts to detect a default label method for collection.
-
# config.collection_label_methods = [ :to_label, :name, :title, :to_s ]
-
-
# Series of attempts to detect a default value method for collection.
-
# config.collection_value_methods = [ :id, :to_s ]
-
-
# You can wrap a collection of radio/check boxes in a pre-defined tag, defaulting to none.
-
# config.collection_wrapper_tag = nil
-
-
# You can define the class to use on all collection wrappers. Defaulting to none.
-
# config.collection_wrapper_class = nil
-
-
# You can wrap each item in a collection of radio/check boxes with a tag,
-
# defaulting to :span.
-
# config.item_wrapper_tag = :span
-
-
# You can define a class to use in all item wrappers. Defaulting to none.
-
# config.item_wrapper_class = nil
-
-
# How the label text should be generated altogether with the required text.
-
1
config.label_text = ->(label, required, _) { "#{label} #{required}" }
-
-
# You can define the class to use on all labels. Default is nil.
-
# config.label_class = nil
-
-
# You can define the default class to be used on forms. Can be overriden
-
# with `html: { :class }`. Defaulting to none.
-
# config.default_form_class = nil
-
-
# You can define which elements should obtain additional classes
-
# config.generate_additional_classes_for = [:wrapper, :label, :input]
-
-
# Whether attributes are required by default (or not). Default is true.
-
# config.required_by_default = true
-
-
# Tell browsers whether to use the native HTML5 validations (novalidate form option).
-
# These validations are enabled in SimpleForm's internal config but disabled by default
-
# in this configuration, which is recommended due to some quirks from different browsers.
-
# To stop SimpleForm from generating the novalidate option, enabling the HTML5 validations,
-
# change this configuration to true.
-
1
config.browser_validations = true
-
-
# Collection of methods to detect if a file type was given.
-
# config.file_methods = [ :mounted_as, :file?, :public_filename ]
-
-
# Custom mappings for input types. This should be a hash containing a regexp
-
# to match as key, and the input type that will be used when the field name
-
# matches the regexp as value.
-
# config.input_mappings = { /count/ => :integer }
-
-
# Custom wrappers for input types. This should be a hash containing an input
-
# type as key and the wrapper that will be used for all inputs with specified type.
-
# config.wrapper_mappings = { string: :prepend }
-
-
# Namespaces where SimpleForm should look for custom input classes that
-
# override default inputs.
-
# config.custom_inputs_namespaces << "CustomInputs"
-
-
# Default priority for time_zone inputs.
-
# config.time_zone_priority = nil
-
-
# Default priority for country inputs.
-
# config.country_priority = nil
-
-
# When false, do not use translations for labels.
-
# config.translate_labels = true
-
-
# Automatically discover new inputs in Rails' autoload path.
-
# config.inputs_discovery = true
-
-
# Cache SimpleForm inputs discovery
-
# config.cache_discovery = !Rails.env.development?
-
-
# Default class for inputs
-
# config.input_class = nil
-
-
# Define the default class of the input wrapper of the boolean input.
-
1
config.boolean_label_class = 'checkbox'
-
-
# Defines if the default input wrapper class should be included in radio
-
# collection wrappers.
-
# config.include_default_input_wrapper_class = true
-
-
# Defines which i18n scope will be used in Simple Form.
-
# config.i18n_scope = 'simple_form'
-
end
-
# NOTE: This is a modified version of simple_form's default config file.
# The only changes were to move the inputs to after the hints and errors.

# Use this setup block to configure all options available in SimpleForm.
SimpleForm.setup do |config|
  config.error_notification_class = 'alert alert-danger'
  config.button_class = 'btn btn-default'
  config.boolean_label_class = nil

  # Standard stacked (vertical) Bootstrap form group.
  config.wrappers :vertical_form, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.use :placeholder
    w.optional :maxlength
    w.optional :pattern
    w.optional :min_max
    w.optional :readonly
    w.use :label, class: 'control-label'
    w.use :error, wrap_with: { tag: 'span', class: 'help-block' }
    w.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
    w.use :input, class: 'form-control'
  end

  # Vertical wrapper for file inputs (no form-control class on the input).
  config.wrappers :vertical_file_input, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.use :placeholder
    w.optional :maxlength
    w.optional :readonly
    w.use :label, class: 'control-label'
    w.use :error, wrap_with: { tag: 'span', class: 'help-block' }
    w.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
    w.use :input
  end

  # Vertical wrapper for a single checkbox.
  config.wrappers :vertical_boolean, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.optional :readonly

    w.wrapper tag: 'div', class: 'checkbox' do |inner|
      inner.use :label_input
    end

    w.use :error, wrap_with: { tag: 'span', class: 'help-block' }
    w.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
  end

  # Vertical wrapper for radio-button / checkbox collections.
  config.wrappers :vertical_radio_and_checkboxes, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.optional :readonly
    w.use :label, class: 'control-label'
    w.use :error, wrap_with: { tag: 'span', class: 'help-block' }
    w.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
    w.use :input
  end

  # Two-column (horizontal) Bootstrap form group.
  config.wrappers :horizontal_form, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.use :placeholder
    w.optional :maxlength
    w.optional :pattern
    w.optional :min_max
    w.optional :readonly
    w.use :label, class: 'col-sm-3 control-label'

    w.wrapper tag: 'div', class: 'col-sm-9' do |inner|
      inner.use :error, wrap_with: { tag: 'span', class: 'help-block' }
      inner.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
      inner.use :input, class: 'form-control'
    end
  end

  # Horizontal wrapper for file inputs.
  config.wrappers :horizontal_file_input, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.use :placeholder
    w.optional :maxlength
    w.optional :readonly
    w.use :label, class: 'col-sm-3 control-label'

    w.wrapper tag: 'div', class: 'col-sm-9' do |inner|
      inner.use :error, wrap_with: { tag: 'span', class: 'help-block' }
      inner.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
      inner.use :input
    end
  end

  # Horizontal wrapper for a single checkbox.
  config.wrappers :horizontal_boolean, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.optional :readonly

    w.wrapper tag: 'div', class: 'col-sm-offset-3 col-sm-9' do |outer|
      outer.wrapper tag: 'div', class: 'checkbox' do |inner|
        inner.use :label_input
      end

      outer.use :error, wrap_with: { tag: 'span', class: 'help-block' }
      outer.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
    end
  end

  # Horizontal wrapper for radio-button / checkbox collections.
  config.wrappers :horizontal_radio_and_checkboxes, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.optional :readonly

    w.use :label, class: 'col-sm-3 control-label'

    w.wrapper tag: 'div', class: 'col-sm-9' do |inner|
      inner.use :error, wrap_with: { tag: 'span', class: 'help-block' }
      inner.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
      inner.use :input
    end
  end

  # Inline form; the label is visually hidden but kept for screen readers.
  config.wrappers :inline_form, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.use :placeholder
    w.optional :maxlength
    w.optional :pattern
    w.optional :min_max
    w.optional :readonly
    w.use :label, class: 'sr-only'
    w.use :error, wrap_with: { tag: 'span', class: 'help-block' }
    w.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
    w.use :input, class: 'form-control'
  end

  # Wrapper for multi-part date/time selects.
  config.wrappers :multi_select, tag: 'div', class: 'form-group', error_class: 'has-error' do |w|
    w.use :html5
    w.optional :readonly
    w.use :label, class: 'control-label'
    w.wrapper tag: 'div', class: 'form-inline' do |inner|
      inner.use :error, wrap_with: { tag: 'span', class: 'help-block' }
      inner.use :hint, wrap_with: { tag: 'p', class: 'help-block' }
      inner.use :input, class: 'form-control'
    end
  end
  # Wrappers for forms and inputs using the Bootstrap toolkit.
  # Check the Bootstrap docs (http://getbootstrap.com)
  # to learn about the different styles for forms and inputs,
  # buttons and other elements.
  config.default_wrapper = :vertical_form
  config.wrapper_mappings = {
    check_boxes: :vertical_radio_and_checkboxes,
    radio_buttons: :vertical_radio_and_checkboxes,
    file: :vertical_file_input,
    boolean: :vertical_boolean,
    datetime: :multi_select,
    date: :multi_select,
    time: :multi_select
  }
end
-
# Be sure to restart your server when you modify this file.

# This file contains settings for ActionController::ParamsWrapper which
# is enabled by default.

# Enable parameter wrapping for JSON. You can disable this by setting :format to an empty array.
ActiveSupport.on_load(:action_controller) do
  # Wrap incoming JSON request bodies under a root key derived from the controller name.
  wrap_parameters format: [:json]
end

# To enable root element in JSON for ActiveRecord objects.
# ActiveSupport.on_load(:active_record) do
#   self.include_root_in_json = true
# end
-
# frozen_string_literal: true

# Route constraint for the Resque web UI: only users whose Ability responds
# to #admin? and returns true may pass.
resque_web_constraint = lambda do |request|
  current_user = request.env['warden'].user
  ability = Ability.new current_user
  ability.present? && ability.respond_to?(:admin?) && ability.admin?
end
-
-
1
Rails.application.routes.draw do
  mount Bulkrax::Engine, at: '/'
  mount BrowseEverything::Engine => '/browse'

  # block Blacklight bookmark routes
  # NOTE(review): these 404 routes are defined before `resources :bookmarks`
  # below, so they shadow the GET/POST bookmark routes — presumably intentional.
  get '/bookmarks', to: 'application#rescue_404'
  post '/bookmarks', to: 'application#rescue_404'
  get '/bookmarks/*all', to: 'application#rescue_404'
  post '/bookmarks/*all', to: 'application#rescue_404'

  mount Blacklight::Engine => '/'

  # legacy generic_works URLs are permanently hosted under data_sets
  get '/concern/generic_works/*rest', to: redirect( '/data/concern/data_sets/%{rest}', status: 302 )

  # static pages served by hyrax/static; the constraint whitelists the
  # allowed action names (extended /x regex ignores the whitespace)
  get ':action' => 'hyrax/static#:action', constraints: { action: %r{
    about|
    agreement|
    dbd-documentation-guide|
    dbd-glossary|
    file-format-preservation|
    globus-help|
    help|
    how-to-upload|
    management-plan-text|
    mendeley|
    metadata-guidance|
    prepare-your-data|
    retention|
    subject_libraries|
    support-for-depositors|
    terms|
    use-downloaded-data|
    versions|
    zotero
  }x },
      as: :static

  mount Riiif::Engine => 'images', as: :riiif if Hyrax.config.iiif_image_server?
  concern :searchable, Blacklight::Routes::Searchable.new

  resource :catalog, only: [:index], as: 'catalog', path: '/catalog', controller: 'catalog' do
    concerns :searchable
  end

  # IU deployments authenticate through CAS; everything else uses plain Devise.
  if Rails.configuration.authentication_method == "iu"
    devise_for :users, controllers: { sessions: 'users/sessions', omniauth_callbacks: "users/omniauth_callbacks" }, skip: [:passwords, :registration]
    devise_scope :user do
      get('global_sign_out',
          to: 'users/sessions#global_logout',
          as: :destroy_global_session)
      get 'sign_out', to: 'devise/sessions#destroy', as: :destroy_user_session
      get 'users/auth/cas', to: 'users/omniauth_authorize#passthru', defaults: { provider: :cas }, as: "new_user_session"
    end
  else
    devise_for :users
  end

  mount Qa::Engine => '/authorities'
  mount Hyrax::Engine, at: '/'
  # mount Hydra::RoleManagement::Engine => '/' # uncomment to expose Role management in UI
  resources :welcome, only: 'index'
  root 'hyrax/homepage#index'
  curation_concerns_basic_routes
  concern :exportable, Blacklight::Routes::Exportable.new

  namespace :hyrax, path: :concern do
    resources :collections do
      member do
        get 'display_provenance_log'
      end
    end
  end

  namespace :hyrax, path: :concern do
    resources :file_sets do
      member do
        get 'display_provenance_log'
      end
    end
  end

  # extra member actions for data sets (DOIs, Globus downloads, provenance, ...)
  namespace :hyrax, path: :concern do
    resources :data_sets do
      member do
        # post 'confirm'
        get 'display_provenance_log'
        get 'doi'
        post 'doi'
        post 'globus_download'
        post 'globus_add_email'
        get 'globus_add_email'
        delete 'globus_clean_download'
        post 'globus_download_add_email'
        get 'globus_download_add_email'
        post 'globus_download_notify_me'
        get 'globus_download_notify_me'
        post 'identifiers'
        post 'tombstone'
        post 'zip_download'
      end
    end
  end

  # Permissions routes
  namespace :hyrax, path: :concern do
    resources :permissions, only: [] do
      member do
        get :copy_access
      end
    end
  end

  resources :solr_documents, only: [:show], path: '/catalog', controller: 'catalog' do
    concerns :exportable
  end

  # Resque web UI is restricted to admins (see resque_web_constraint above).
  constraints resque_web_constraint do
    mount ResqueWeb::Engine => "/resque"
  end

  resources :bookmarks do
    concerns :exportable
    collection do
      delete 'clear'
    end
  end


  get '/provenance_log/(:id)', to: 'provenance_log#show'
  get '/provenance_log_find/', to: 'provenance_log#show'
  post '/provenance_log_find/', to: 'provenance_log#find'
  get '/provenance_log_zip_download/', to: 'provenance_log#show'
  post '/provenance_log_zip_download/', to: 'provenance_log#log_zip_download'
  get '/provenance_log_deleted_works/', to: 'provenance_log#deleted_works'
  post '/provenance_log_deleted_works/', to: 'provenance_log#deleted_works'
  get '/guest_user_message', to: 'guest_user_message#show'

  # NOTE(review): the constrained /sda/request match below appears unreachable —
  # the optional-segment route two lines up matches the same GETs first; confirm.
  get '/sda/request/(:collection)/(:object)', to: 'archive#download_request'
  get '/sda/status/(:collection)/(:object)', to: 'archive#status'
  match '/sda/request/:collection/:object', to: 'archive#download_request', constraints: { object: /[^\/]+/ }, via: :get

  # robots.txt and rack attack config forms
  resource :robots, only: [:show, :edit, :update]
  resource :rack_attack, only: [:edit, :update]

  # Send ActionController::RoutingError to 404 page
  # Must be the last route defined
  match '*unmatched', to: 'application#rescue_404', via: :all

  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
-
# Restart Spring whenever one of these files changes.
watched_paths = %w[
  .ruby-version
  .rbenv-vars
  tmp/restart.txt
  tmp/caching-dev.txt
]
watched_paths.each do |watched|
  Spring.watch(watched)
end
-
# frozen_string_literal: true

# Base virus scanner for this app; maps scan outcomes onto the
# ::Deepblue::VirusScanService result constants.
class AbstractVirusScanner < Hydra::Works::VirusScanner

  def initialize( file )
    super
  end

  # Scan +file+ with the ClamAV singleton; logs a warning and returns the
  # VIRUS result constant when ClamAV reports a non-zero status.
  def clam_av_scanner
    result = ClamAV.instance.method(:scanfile).call(file)
    return ::Deepblue::VirusScanService::VIRUS_SCAN_NOT_VIRUS if result.zero?
    warning "A virus was found in #{file}: #{result}"
    ::Deepblue::VirusScanService::VIRUS_SCAN_VIRUS
  end

  # Default: report that the scan was skipped (subclasses override).
  def infected?
    ::Deepblue::VirusScanService::VIRUS_SCAN_SKIPPED
  end

  # Fallback used when no scanner is configured at all.
  def null_scanner
    warning "Unable to check #{file} for viruses because no virus scanner is defined"
    ::Deepblue::VirusScanService::VIRUS_SCAN_SKIPPED_SERVICE_UNAVAILABLE
  end

end
-
# frozen_string_literal: true

# monkey patch of active_fedora gem, lib/active_fedora/persistence.rb

module ActiveFedora
  # = Active Fedora Persistence
  module Persistence
    extend ActiveSupport::Concern

    # True when this object has never been persisted. Probes the LDP source:
    # a tombstoned (Ldp::Gone) resource counts as previously persisted,
    # Ldp::NotFound as a brand-new record.
    def new_record?
      return true if @ldp_source.subject.nil?
      @ldp_source.get
      false
    rescue Ldp::Gone
      false
    rescue Ldp::NotFound
      true
    end

    # Persisted means saved at least once and not destroyed.
    def persisted?
      !(destroyed? || new_record?)
    end

    # Returns true if this object has been destroyed, otherwise returns false.
    def destroyed?
      @destroyed
    end

    # Saves a Base object, and any dirty attached files, then updates
    # the Solr index for this object, unless option :update_index=>false is present.
    # Indexing is also controlled by the `create_needs_index?' and `update_needs_index?' methods.
    #
    # @param [Hash] options
    # @option options [Boolean] :update_index (true) set false to skip indexing
    # @return [Boolean] true if save was successful, otherwise false
    def save(*options)
      create_or_update(*options)
    end

    def save!(*args)
      create_or_update(*args)
    end

    # Pushes the object and all of its new or dirty attached files into Fedora
    def update(attributes)
      assign_attributes(attributes)
      save
    end

    alias update_attributes update

    # Updates its receiver just like #update but calls #save! instead
    # of +save+, so an exception is raised if the record is invalid and saving will fail.
    def update!(attributes)
      assign_attributes(attributes)
      save!
    end

    alias update_attributes! update!

    # Deletes an object from Fedora and deletes the indexed record from Solr.
    # Delete does not run any callbacks, so consider using _destroy_ instead.
    # @param [Hash] opts
    # @option opts [Boolean] :eradicate if passed in, eradicate the tombstone from Fedora
    def delete(opts = {})
      return self if new_record?

      @destroyed = true

      id = self.id ## cache so it's still available after delete
      # Clear out the ETag
      @ldp_source = build_ldp_resource(id)
      begin
        @ldp_source.delete
      rescue Ldp::NotFound
        raise ObjectNotFoundError, "Unable to find #{id} in the repository"
      end

      ActiveFedora::SolrService.delete(id) if ActiveFedora.enable_solr_updates?
      self.class.eradicate(id) if opts[:eradicate]
      freeze
    end

    # Delete the object from Fedora and Solr. Run any before/after/around callbacks for destroy
    # @param [Hash] opts
    # @option opts [Boolean] :eradicate if passed in, eradicate the tombstone from Fedora
    def destroy(*opts)
      raise ReadOnlyRecord if readonly?
      delete(*opts)
    end

    # Deletes the record in the database and freezes this instance to reflect
    # that no changes should be made (since they can't be persisted).
    #
    # There's a series of callbacks associated with #destroy!. If the
    # <tt>before_destroy</tt> callback throws +:abort+ the action is cancelled
    # and #destroy! raises ActiveFedora::RecordNotDestroyed.
    # See ActiveFedora::Callbacks for further details.
    def destroy!
      destroy || _raise_record_not_destroyed
    end

    # Remove this object's tombstone from Fedora (see ClassMethods#eradicate).
    def eradicate
      self.class.eradicate(id)
    end

    # Used when setting containment
    def base_path_for_resource=(path)
      @base_path = path
    end

    module ClassMethods
      # Creates an object (or multiple objects) and saves it to the repository, if validations pass.
      # The resulting object is returned whether the object was saved successfully to the repository or not.
      #
      # The +attributes+ parameter can be either be a Hash or an Array of Hashes. These Hashes describe the
      # attributes on the objects that are to be created.
      #
      # ==== Examples
      # # Create a single new object
      # User.create(:first_name => 'Jamie')
      #
      # # Create an Array of new objects
      # User.create([{ :first_name => 'Jamie' }, { :first_name => 'Jeremy' }])
      #
      # # Create a single object and pass it into a block to set other attributes.
      # User.create(:first_name => 'Jamie') do |u|
      #   u.is_admin = false
      # end
      #
      # # Creating an Array of new objects using a block, where the block is executed for each object:
      # User.create([{ :first_name => 'Jamie' }, { :first_name => 'Jeremy' }]) do |u|
      #   u.is_admin = false
      # end
      def create(attributes = nil, &block)
        if attributes.is_a?(Array)
          attributes.collect { |attr| create(attr, &block) }
        else
          object = new(attributes, &block)
          object.save
          object
        end
      end

      # Removes an object's tombstone so another object with the same uri may be created.
      # NOTE: this is in violation of the linked data platform and is only here as a convenience
      # method. It shouldn't be used in the general course of repository operations.
      def eradicate(uri)
        gone?(uri) ? delete_tombstone(uri) : false
      end

      # Allows the user to find out if an id has been used in the system and then been deleted
      # @param uri id in fedora that may or may not have been deleted
      def gone?(uri)
        ActiveFedora::Base.find(uri)
        false
      rescue Ldp::Gone
        true
      rescue ActiveFedora::ObjectNotFoundError
        false
      end

      private

      # Issue the DELETE against the resource's fcr:tombstone endpoint.
      def delete_tombstone(uri)
        tombstone = ActiveFedora::Base.id_to_uri(uri) + "/fcr:tombstone"
        ActiveFedora.fedora.connection.delete(tombstone)
        true
      end
    end

    private

    # Dispatch to create or update; returns false only when the underlying
    # record operation returned false.
    def create_or_update(*args)
      raise ReadOnlyRecord if readonly?
      result = new_record? ? _create_record(*args) : _update_record(*args)
      result != false
    end

    ## begin monkey patch

    # Deals with preparing new object to be saved to Fedora, then pushes it and its attached files into Fedora.
    # Patched to retry with a freshly minted id when the LDP create conflicts.
    def _create_record(_options = {})
      assign_rdf_subject
      serialize_attached_files
      begin
        @ldp_source = @ldp_source.create
      rescue Ldp::Conflict
        # minted id collided with an existing resource; retry with new ids
        _create_record_ldp_source_create_retry
      end
      assign_uri_to_contained_resources
      save_contained_resources
      refresh
    end

    # Retry LDP resource creation with newly minted ids, re-raising after
    # 100 consecutive Ldp::Conflict collisions.
    def _create_record_ldp_source_create_retry
      # puts ">>>>>>>>>>>>>>>> _create_record_ldp_source_create_retry <<<<<<<<<<<<<<<<<<<<<"
      attempts = 0
      loop do
        break if attempts > 99
        new_id = assign_id
        # puts ">>>>>>>>>>> assign_id=#{assign_id} <<<<<<<<<<<<<"
        begin
          @ldp_source = LdpResource.new(ActiveFedora.fedora.connection, self.class.id_to_uri(new_id), @resource)
          @ldp_source = @ldp_source.create
          return
        rescue Ldp::Conflict
          attempts += 1
          raise if attempts > 99
        end
      end
    end

    ## end monkey patch

    def _update_record(_options = {})
      serialize_attached_files
      execute_sparql_update
      save_contained_resources
      refresh
    end

    def _raise_record_not_destroyed
      @_association_destroy_exception ||= nil
      raise @_association_destroy_exception || RecordNotDestroyed.new("Failed to destroy the record", self)
    ensure
      @_association_destroy_exception = nil
    end

    # Rebuild the LDP source so cached state (e.g. the ETag) is discarded.
    def refresh
      @ldp_source = build_ldp_resource(id)
      @resource = nil
    end

    # Send only the changed attributes to Fedora as a SPARQL update.
    def execute_sparql_update
      change_set = ChangeSet.new(self, resource, changed_attributes.keys)
      return true if change_set.empty?
      ActiveFedora.fedora.ldp_resource_service.update(change_set, self.class, id)
    end

    # Override to tie in an ID minting service
    def assign_id; end

    # This is only used when creating a new record. If the object doesn't have an id
    # and assign_id can mint an id for the object, then assign it to the resource.
    # Otherwise the resource will have the id assigned by the LDP server
    def assign_rdf_subject
      @ldp_source = if !id && new_id = assign_id # rubocop:disable Lint/AssignmentInCondition
                      LdpResource.new(ActiveFedora.fedora.connection, self.class.id_to_uri(new_id), @resource)
                    else
                      LdpResource.new(ActiveFedora.fedora.connection, @ldp_source.subject, @resource, base_path_for_resource)
                    end
    end

    def base_path_for_resource
      @base_path ||= ActiveFedora.fedora.host + default_base_path_for_resource
    end

    def default_base_path_for_resource
      init_root_path if has_uri_prefix?
      root_resource_path
    end

    # Check to see if the :base_path (from fedora.yml) exists in Fedora. If it doesn't exist, then create it.
    def init_root_path
      path = root_resource_path.gsub(/^\//, "")
      ActiveFedora.fedora.connection.head(path)
    rescue Ldp::NotFound
      ActiveFedora.fedora.connection.put(path, "")
    end

    # Point each contained resource at a URI nested under this object's URI.
    def assign_uri_to_contained_resources
      contained_resources.each do |name, source|
        source.uri = "#{uri}/#{name}"
      end
    end

    # Persist only the contained resources that have unsaved changes.
    def save_contained_resources
      contained_resources.changed.each do |_, resource|
        resource.save
      end
    end

    def contained_resources
      @contained_resources ||= attached_files.merge(contained_rdf_sources)
    end

    def contained_rdf_sources
      @contained_rdf_sources ||=
        AssociationHash.new(self, self.class.contained_rdf_source_reflections)
    end
  end
end
-
# this class is specific to UMich authentication only

# Default strategy for signing in a user, based on remote user attribute in headers.
module Devise::Behaviors
  module HttpHeaderAuthenticatableBehavior

    # Called if the user doesn't already have a rails session cookie.
    # A usable remote user must be present and must not be cosign's null user.
    def valid_user?(headers)
      user_key = remote_user(headers)
      user_key.present? && user_key != '(null)@umich.edu'
    end

    protected

    # Remote user is coming back from cosign as uniquename.
    # Append @umich.edu to this value to satisfy user model validations.
    def remote_user(headers)
      uniqname = headers['HTTP_X_REMOTE_USER']
      return nil unless uniqname.present?
      "#{uniqname}@umich.edu"
    end

  end
end
-
# this class is specific to UMich authentication only

require 'devise/strategies/http_header_authenticatable'
module Devise
  module Models
    # Devise model mixin paired with the http_header_authenticatable strategy.
    # The strategy handles lookup/creation, so the model side is a no-op.
    module HttpHeaderAuthenticatable
      extend ActiveSupport::Concern

      # Devise hook; intentionally empty for header-based authentication.
      def after_database_authentication
      end

      protected

    end
  end
end
-
# this class is specific to UMich authentication only

module Devise
  module Strategies
    # Warden strategy that signs a user in from the HTTP_X_REMOTE_USER header
    # supplied by the UMich cosign proxy.
    class HttpHeaderAuthenticatable < ::Devise::Strategies::Base

      include Devise::Behaviors::HttpHeaderAuthenticatableBehavior

      # Called if the user doesn't already have a rails session cookie
      def valid?
        valid_user?(request.headers)
      end

      # Look up (or lazily create) the User for the header-supplied key and
      # signal success or failure to Warden.
      def authenticate!
        user_key = remote_user(request.headers)
        unless user_key.present?
          Rails.logger.debug '[AUTHN] HttpHeaderAuthenticatable#authenticate! failed.'
          return fail!
        end
        Rails.logger.debug "[AUTHN] HttpHeaderAuthenticatable#authenticate! succeeded: #{user_key}"
        account = User.find_by_user_key(user_key)
        account = User.create(email: user_key) if account.nil?
        success!(account)
      end

    end
  end
end

Warden::Strategies.add(:http_header_authenticatable, Devise::Strategies::HttpHeaderAuthenticatable)
-
# frozen_string_literal: true

# Logger for the email log; each record is emitted as the bare message only.
class EmailLogger < Logger

  # Strip the default severity/timestamp prefix; emit "<msg>\n".
  def format_message( _severity, _timestamp, _progname, msg )
    format( "%s\n", msg )
  end

end
-
-
# don't forget to request log roll-over script to not roll these files over
-
1
logfile = File.open( Rails.root.join( 'log', "email_#{Rails.env}.log" ), 'a' ) # create log file
-
1
logfile.sync = true # automatically flushes data to file
-
1
EMAIL_LOGGER = EmailLogger.new( logfile ) # constant accessible anywhere
-
1
module Extensions
  module ActiveFedora
    module File
      # Replaces obsolete URI escaping in ActiveFedora::File with CGI escaping.
      module EscapingObsoletions
        # modified from active_fedora: update obsolete URI methods to CGI
        # HTTP headers sent when the file content is pushed to the LDP server;
        # Content-Disposition is added only when an original filename is known.
        def ldp_headers
          headers = {
            'Content-Type'.freeze => mime_type,
            'Content-Length'.freeze => content.size.to_s
          }
          if @original_name
            headers['Content-Disposition'.freeze] =
              "attachment; filename=\"#{::CGI.escape(@original_name)}\""
          end
          headers
        end
      end
    end
  end
end
-
# extensions.rb - loads monkeypatches for samvera libraries
-
-
# handle downloads from fedora, archive server storage
-
1
Hyrax::DownloadsController.prepend Extensions::Hyrax::DownloadsController::VariableDownloadSourcing
-
-
# update obsolete URI escaping methods
-
1
Hydra::AccessControls::Permission.prepend Extensions::Hydra::AccessControls::Permission::EscapingObsoletions
-
1
ActiveFedora::File.prepend Extensions::ActiveFedora::File::EscapingObsoletions
-
-
# Collections search
-
1
Qa::Authorities::Collections.prepend Extensions::Qa::Authorities::Collections::CollectionsSearch
-
-
# return false for render_bookmarks_control? in CollectionsController
-
1
Hyrax::CollectionsController.prepend Extensions::Hyrax::CollectionsController::RenderBookmarksControl
-
1
Hyrax::My::CollectionsController.prepend Extensions::Hyrax::CollectionsController::RenderBookmarksControl
-
-
# Statistics By Date Report page
-
1
Hyrax::AdminStatsPresenter.prepend Extensions::Hyrax::AdminStatsPresenter::AdminStatsPresenterBehavior
-
-
# accessibility improvements
-
1
Hyrax::CollapsableSectionPresenter.prepend Extensions::Hyrax::CollapsableSectionPresenter::CollapsableSectionPresenterBehavior
-
1
module Extensions
  module Hydra
    module AccessControls
      module Permission
        # Swaps obsolete URI.escape/unescape calls for their CGI equivalents.
        module EscapingObsoletions
          # Agent name with percent-encoding removed (e.g. "a%40b" -> "a@b").
          def agent_name
            encoded_name = parsed_agent.last
            ::CGI.unescape(encoded_name)
          end

          # Build the RDF agent resource, percent-encoding the name so it is
          # safe inside the URI fragment.
          def build_agent_resource(prefix, name)
            agent_uri = ::RDF::URI.new("#{prefix}##{::CGI.escape(name)}")
            [::Hydra::AccessControls::Agent.new(agent_uri)]
          end
        end
      end
    end
  end
end
-
1
module Extensions
  module Hyrax
    module AdminStatsPresenter
      # Date-range validation helpers for the Statistics By Date report.
      module AdminStatsPresenterBehavior

        # True when no start date is given, or when it does not come after
        # the effective end date.
        def valid_dates
          return true if start_date.nil?
          start_date <= second_date
        end

        # The effective end of the range: the supplied end date, or today.
        def second_date
          end_date || Date.current
        end

        # Drop both filter dates when the range is inverted.
        def clear_invalid_dates!
          return unless start_date && start_date > second_date
          stats_filters[:start_date] = nil
          stats_filters[:end_date] = nil
        end
      end
    end
  end
end
-
# modified from hyrax 2.9.6: added aria-label attribute
module Extensions
  module Hyrax
    module CollapsableSectionPresenter
      # Accessibility patch: the collapse-toggle anchor gains an aria-label
      # (the other aria attributes and markup match the hyrax original).
      module CollapsableSectionPresenterBehavior
        private
        # Render the <a> that toggles the collapsible section, containing the
        # icon span (aria-hidden) and the section text.
        # NOTE(review): content_tag with a block treats the hash as options —
        # relies on ActionView's content_tag signature; confirm on upgrade.
        def button_tag
          content_tag(:a,
                      role: 'button',
                      class: "#{button_class}collapse-toggle",
                      data: { toggle: 'collapse' },
                      href: "##{id}",
                      'aria-label' => "Expand / Collapse #{text}",
                      'aria-expanded' => open,
                      'aria-controls' => id) do
            safe_join([content_tag(:span, '', class: icon_class, 'aria-hidden' => true),
                       content_tag(:span, text)], ' ')
          end
        end
      end
    end
  end
end
-
1
module Extensions
  module Hyrax
    module CollectionsController
      # Mixed into the collections controllers to suppress Blacklight's
      # bookmark checkbox in gallery view.
      module RenderBookmarksControl
        protected

        # Blacklight hook: never show the bookmarks control.
        def render_bookmarks_control?
          false
        end
      end
    end
  end
end
-
# modified from hyrax for bypass_fedora case
module Extensions
  module Hyrax
    module DownloadsController
      # Routes a download to the right source depending on how the content is
      # stored: redirected (bypass-fedora originals), fedora, or local disk.
      module VariableDownloadSourcing
        # Render the 404 page if the file doesn't exist.
        # Otherwise renders the file.
        def show
          # `file` is resolved by the prepended Hyrax controller (via super chain)
          case file
          when ::ActiveFedora::File
            case file.mime_type
            when /access-type=URL/
              # for original files that bypass fedora, manage archival file interactions on FileSet show page
              redirect_to "/concern/file_sets/#{file.id.split('/').first}"
            else
              # For original files that are stored in fedora
              super
            end
          when ::String
            # For derivatives stored on the local file system
            send_local_content
          else
            render_404
          end
        end
      end
    end
  end
end
-
# modified from hyrax 2.9.6
module Extensions
  module Qa
    module Authorities
      module Collections
        # Patched collection authority search that supports partial-word
        # matching by appending a wildcard to the query.
        module CollectionsSearch
          # Run a collection search on behalf of +controller+'s current user;
          # returns an array of { id:, label:, value: } hashes (empty when no
          # user is signed in).
          def search(_q, controller)
            # The Hyrax::CollectionSearchBuilder expects a current_user
            return [] unless controller.current_user

            repo = ::CatalogController.new.repository
            query = controller.params[:q]
            # appending '*' facilitates searching on partial word match
            query << '*' if query.to_s.size >= 2
            response = repo.search(search_builder(controller))
            response.documents.map do |doc|
              { id: doc.id, label: doc.title, value: doc.id }
            end
          end
        end
      end
    end
  end
end
-
# This was taken directly from Sufia's GenericFile::MimeTypes
module Hydra::Works
  # Predicates and mime-type lists used to classify a file by its mime_type.
  module MimeTypes
    extend ActiveSupport::Concern

    # True when mime_type is a PDF type.
    def pdf?
      self.class.pdf_mime_types.include?(mime_type)
    end

    # True when mime_type is a recognized image type.
    def image?
      self.class.image_mime_types.include?(mime_type)
    end

    # True when mime_type is a recognized video type.
    def video?
      self.class.video_mime_types.include?(mime_type)
    end

    # True when mime_type is a recognized audio type.
    def audio?
      self.class.audio_mime_types.include?(mime_type)
    end

    # True when mime_type is a recognized office document type.
    def office_document?
      self.class.office_document_mime_types.include?(mime_type)
    end

    module ClassMethods
      def image_mime_types
        %w[image/png image/jpeg image/jpg image/jp2 image/bmp image/gif image/tiff]
      end

      def pdf_mime_types
        %w[application/pdf]
      end

      def video_mime_types
        %w[video/mpeg video/mp4 video/webm video/x-msvideo video/avi video/quicktime application/mxf]
      end

      def audio_mime_types
        # audio/x-wave is the mime type that fits 0.6.0 returns for a wav file.
        # audio/mpeg is the mime type that fits 0.6.0 returns for an mp3 file.
        %w[audio/mp3 audio/mpeg audio/wav audio/x-wave audio/x-wav audio/ogg]
      end

      def office_document_mime_types
        %w[
          text/plain
          text/rtf
          application/msword
          application/vnd.openxmlformats-officedocument.wordprocessingml.document
          application/vnd.oasis.opendocument.text
          application/vnd.ms-excel
          application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
          application/vnd.ms-powerpoint
          application/vnd.openxmlformats-officedocument.presentationml.presentation
        ]
      end
    end
  end
end
-
# frozen_string_literal: true

module Hydra::Works

  # Virus-check mixin. The model-level validation that used to live here is
  # commented out; scanning is expected to move to the ingest step instead.
  module VirusCheck
    extend ActiveSupport::Concern

    # Move this to Ingest step
    # included do
    #   validate :must_not_detect_viruses
    #
    #   def must_not_detect_viruses
    #     scan_result = virus_scan
    #     return true unless virus_scan_detected_virus?( scan_result: scan_result )
    #     errors.add( :base, "Failed to verify uploaded file is not a virus" )
    #     false
    #   end
    #
    # end

  end

end
-
# frozen_string_literal: true

require 'abstract_virus_scanner'

# No-op virus scanner; inherits the skip behavior from AbstractVirusScanner.
class NullVirusScanner < AbstractVirusScanner

  def initialize( file )
    super
  end

end
-
# frozen_string_literal: true

# Logger for the provenance log; writes each entry as the bare message.
class ProvenanceLogger < Logger
  # Emit the message followed by a newline, with no severity/timestamp prefix.
  def format_message( _severity, _timestamp, _progname, msg )
    format( "%s\n", msg )
  end
end
-
-
# logfile = File.open("#{Rails.root}/log/custom.log", 'a') # create log file
-
1
logfile = File.open( DeepBlueDocs::Application.config.provenance_log_path, 'a' ) # create log file
-
1
logfile.sync = true # automatically flushes data to file
-
1
PROV_LOGGER = ProvenanceLogger.new( logfile ) # constant accessible anywhere
-
# frozen_string_literal: true

module Deepblue

  # Logger for the scheduler; writes bare messages with no prefix.
  class SchedulerLogger < Logger

    # Emit just the message followed by a newline.
    def format_message( _severity, _timestamp, _progname, msg )
      "#{msg}\n"
    end

  end

  # don't forget to request log roll-over script to not roll these files over
  # Open the per-environment scheduler log in append mode. Note the constant
  # is assigned inside the module, so it is Deepblue::SCHEDULER_LOGGER.
  logfile = File.open( Rails.root.join( 'log', "scheduler_#{Rails.env}.log" ), 'a' ) # create log file
  logfile.sync = true # automatically flushes data to file
  SCHEDULER_LOGGER = SchedulerLogger.new(logfile ) # constant accessible anywhere
end
-
# frozen_string_literal: true

module Deepblue

  require 'tasks/task_logger'

  # rubocop:disable Rails/Output
  # Base class for rake-task helpers: parses the task's options hash and
  # exposes to_console/verbose flags plus a lazily created task logger.
  class AbstractTask

    DEFAULT_TO_CONSOLE = true

    DEFAULT_VERBOSE = false

    attr_reader :options

    attr_accessor :verbose, :to_console, :logger

    # @param options [Hash, String] raw task options, parsed via TaskHelper;
    #   a parse failure is reported through the 'error' key and echoed below.
    def initialize( options: {} )
      @options = TaskHelper.task_options_parse options
      if @options.key?( :error ) || @options.key?( 'error' )
        puts "WARNING: options error #{@options['error']}"
        puts "options=#{options}" if @options.key? 'error'
        puts "@options=#{@options}" if @options.key? 'error'
      end
      # FIX: 'to_console' previously defaulted to DEFAULT_VERBOSE (false),
      # leaving the DEFAULT_TO_CONSOLE constant defined but unused; use the
      # intended default so task output reaches the console by default.
      @to_console = TaskHelper.task_options_value( @options, key: 'to_console', default_value: DEFAULT_TO_CONSOLE )
      @verbose = TaskHelper.task_options_value( @options, key: 'verbose', default_value: DEFAULT_VERBOSE )
      puts "@verbose=#{@verbose}" if @verbose
    end

    # Memoized task logger (see #logger_initialize).
    def logger
      @logger ||= logger_initialize
    end

    # Log +msg+ at debug level and echo it to stdout when to_console is set.
    def task_msg( msg )
      logger.debug msg
      puts msg if @to_console
    end

    # Convenience lookup of a single option value with an optional default.
    def task_options_value( key:, default_value: nil, verbose: false )
      TaskHelper.task_options_value( @options, key: key, default_value: default_value, verbose: verbose )
    end

    protected

    def logger_initialize
      # TODO: add some flags to the input yml file for log level and Rails logging integration
      TaskHelper.logger_new
    end

  end
  # rubocop:enable Rails/Output

end
-
# frozen_string_literal: true

require 'logger'

module Deepblue

  # Logger subclass used by rake tasks: emits each entry verbatim, one per
  # line, suppressing the default severity/timestamp/progname prefix.
  class TaskLogger < Logger

    # TODO: add flags for turning on and off parts of message

    def format_message( _severity, _timestamp, _progname, msg )
      # "#{timestamp.to_formatted_s(:db)} #{severity} User: #{EmailHelper.user_email} #{msg}\n"
      format( "%s\n", msg )
    end

  end

end
-
# frozen_string_literal: true
-
-
# An AV class that streams the file to an already-running
-
# clamav daemon
-
-
1
require 'abstract_virus_scanner'
-
1
require 'null_virus_scanner'
-
1
require 'clamav/client'
-
-
1
# Scanner that streams a file to an already-running clamav daemon over TCP
# and maps the daemon's reply onto ::Deepblue::VirusScanService statuses.
class UMichClamAVDaemonScanner < AbstractVirusScanner

  # standard umich clamav configuration (from /etc/clamav/clamav.conf)

  CONNECTION_TYPE = :tcp
  PORT = 3310
  MACHINE = '127.0.0.1'

  # Bytes per INSTREAM packet streamed to the daemon.
  CHUNKSIZE = 4096

  # Stand-in client used when the daemon cannot be reached; #alive? detects
  # the failed connection by this type.
  class CannotConnectClient < NullVirusScanner
    # Fix: removed the redundant `initialize( file )` override that only
    # called `super( file )` — the inherited initializer is identical.
  end

  attr_accessor :client

  # @param filename [String] path of the file to scan
  def initialize( filename )
    super
    @client = begin
      # Fix: use the MACHINE/PORT constants instead of repeating the
      # '127.0.0.1'/3310 literals (they were declared but unused).
      connection = ClamAV::Connection.new( socket: ::TCPSocket.new( MACHINE, PORT ),
                                           wrapper: ::ClamAV::Wrappers::NewLineWrapper.new )
      ClamAV::Client.new( connection )
    rescue Errno::ECONNREFUSED
      # Fix: dropped the unused `=> e` binding (and its rubocop:disable).
      CannotConnectClient.new( filename )
    end
  end

  # Check to see if we can connect to the configured ClamAV daemon.
  def alive?
    case client
    when CannotConnectClient
      false
    else
      client.execute( ClamAV::Commands::PingCommand.new )
    end
  end

  # Scan the file and translate the daemon's response.
  # @return [String] one of the ::Deepblue::VirusScanService scan statuses
  def infected?
    ::Deepblue::LoggingHelper.bold_debug [ ::Deepblue::LoggingHelper.here,
                                           ::Deepblue::LoggingHelper.called_from,
                                           "UMichClamAVDaemonScanner.infected? File '#{file}' exists? #{File.exist? file}" ]
    unless alive?
      warning "Cannot connect to virus scanner. Skipping file #{file}"
      return ::Deepblue::VirusScanService::VIRUS_SCAN_SKIPPED_SERVICE_UNAVAILABLE
    end
    resp = scan_response
    # Fix: log consistently through the private `warning` helper (Hyrax
    # logger); the original mixed it with Kernel#warn, which writes to stderr.
    rv = case resp
         when ClamAV::SuccessResponse
           info "Clean virus check for '#{file}'"
           ::Deepblue::VirusScanService::VIRUS_SCAN_NOT_VIRUS
         when ClamAV::VirusResponse
           warning "Virus #{resp.virus_name} found in file '#{file}'"
           ::Deepblue::VirusScanService::VIRUS_SCAN_VIRUS
         when ClamAV::ErrorResponse
           warning "ClamAV error: #{resp.error_str} for file #{file}. File not scanned!"
           ::Deepblue::VirusScanService::VIRUS_SCAN_ERROR # err on the side of trust? Need to think about this
         else
           warning "ClamAV response unknown type '#{resp.class}': #{resp}. File not scanned!"
           ::Deepblue::VirusScanService::VIRUS_SCAN_UNKNOWN
         end
    return rv
  end

  # Open the file in binary mode and stream it to the daemon.
  # @raise [RuntimeError] when the file cannot be opened
  def scan_response
    begin
      file_io = File.open( file, 'rb' )
    rescue => e
      msg = "Can't open file #{file} for scanning: #{e}"
      error msg
      raise msg
    end

    begin
      scan( file_io )
    ensure
      # Fix: the opened handle was previously never closed (leak).
      file_io.close
    end
  end

  # Do the scan by streaming to the daemon
  # @param [#read] io The IO stream (probably an open file) to read from
  # @return A ClamAV::*Response object
  def scan(io)
    cmd = UMInstreamScanner.new(io, CHUNKSIZE)
    client.execute(cmd)
  end

  private

  # Set up logging for the clamav daemon scanner

  def debug( msg )
    Hyrax.logger&.debug( msg )
  end

  def error( msg )
    Hyrax.logger&.error( msg )
  end

  def info( msg )
    Hyrax.logger&.info( msg )
  end

  def warning( msg )
    Hyrax.logger&.warn( msg )
  end

end
-
-
-
# Stream a file to the AV scanner in chucks to avoid
# reading it all into memory. Internal to how
# ClamAV::Client works
class UMInstreamScanner < ClamAV::Commands::InstreamCommand

  # Drive the clamd INSTREAM protocol: announce the command, stream the IO
  # in length-prefixed packets, send the zero-length terminator, then read
  # the daemon's verdict. Any exception is converted into an ErrorResponse
  # instead of being raised to the caller.
  def call(conn)
    conn.write_request("INSTREAM")
    while (packet = @io.read(@max_chunk_size))
      scan_packet(conn, packet)
    end
    send_end_of_file(conn)
    av_return_status(conn)
  rescue => e
    ClamAV::ErrorResponse.new( "Error sending data to ClamAV Daemon: #{e}" )
  end

  # Read the daemon's reply and convert it to a ClamAV::*Response.
  def av_return_status(conn)
    get_status_from_response(conn.read_response)
  end

  # A zero-length chunk (four zero bytes) terminates the INSTREAM transfer.
  def send_end_of_file(conn)
    conn.raw_write("\x00\x00\x00\x00")
  end

  # Each chunk is prefixed with its size as a 4-byte big-endian integer
  # (pack "N"), per the clamd INSTREAM protocol.
  def scan_packet(conn, packet)
    packet_size = [packet.size].pack("N")
    conn.raw_write("#{packet_size}#{packet}")
  end

end
-
-
# To use a virus checker other than ClamAV:
-
# class MyScanner < Hydra::Works::VirusScanner
-
# def infected?
-
# my_result = Scanner.check_for_viruses(file)
-
# [return true or false]
-
# end
-
# end
-
# frozen_string_literal: true

module Deepblue
  # Logger subclass for the upload log: writes each entry verbatim, one per
  # line, without the default Logger prefix.
  class UploadLogger < Logger

    def format_message( _severity, _timestamp, _progname, msg )
      "#{msg}\n"
    end

  end

  # Opened once at load time; UPLOAD_LOGGER is used process-wide.
  # don't forget to request log roll-over script to not roll these files over
  logfile = File.open( Rails.root.join( 'log', "upload_#{Rails.env}.log" ), 'a' ) # create log file
  logfile.sync = true # automatically flushes data to file
  UPLOAD_LOGGER = UploadLogger.new( logfile ) # constant accessible anywhere
end
-
1
require 'rails_helper'

# Spec for Hyrax::Actors::DataSetActor. NOTE(review): all examples are
# currently commented out; only the let declarations remain, so this file
# exercises nothing beyond loading the actor class.
RSpec.describe Hyrax::Actors::DataSetActor do
  let(:work) { DataSet.new }
  let(:depositor) { create(:user) }
  let(:depositor_ability) { ::Ability.new(depositor) }
  let(:env) { Hyrax::Actors::Environment.new(work, depositor_ability, {}) }

  # describe '#model_actor' do
  #   subject { described_class.new('Test').send(:model_actor, env) }
  #
  #   it "preserves the namespacing" do
  #     is_expected.to be_kind_of Hyrax::Actors::DataSetActor
  #   end
  # end
end
-
# Generated via
#  `rails generate hyrax:work Dissertation`
require 'rails_helper'

# Placeholder spec; the single example is skipped until real tests exist.
RSpec.describe Hyrax::Actors::DissertationActor do
  it "has tests" do
    skip "Add your tests here"
  end
end
-
# Generated via
#  `rails generate hyrax:work GenericWork`
require 'rails_helper'

# Placeholder spec; the single example is skipped until real tests exist.
RSpec.describe Hyrax::Actors::GenericWorkActor do
  it "has tests" do
    skip "Add your tests here"
  end
end
-
1
require 'rails_helper'

# Specs for the collections questioning-authority search endpoint: verifies
# that #search matches collections owned by the current user against the
# query param `q` (prefix, infix, full, unmatched, too-short, and absent).
RSpec.describe Qa::Authorities::Collections do
  subject(:service) { described_class.new }
  let(:controller) { Qa::TermsController.new }
  let(:user1) { FactoryBot.build(:user) }

  # Collections are built with solr documents so the search index sees them
  # without a round-trip to Fedora.
  let!(:collection1) do
    FactoryBot.build(:collection_lw,
                     title: ['foo bar'],
                     user: user1,
                     with_solr_document: true)
  end

  let!(:collection2) do
    FactoryBot.build(:collection_lw,
                     title: ['foo'],
                     user: user1,
                     with_solr_document: true)
  end

  before do
    allow(controller).to receive(:params).and_return(params)
    allow(controller).to receive(:current_user).and_return(user1)
  end

  describe '#search' do
    context 'with partial starting term' do
      let(:params) { ActionController::Parameters.new(q: 'fo') }

      it 'lists collection' do
        expect(service.search(nil, controller))
          .to contain_exactly(include(id: collection1.id), include(id: collection2.id))
      end
    end

    context 'with partial middle term' do
      let(:params) { ActionController::Parameters.new(q: 'ba') }

      it 'lists collection' do
        expect(service.search(nil, controller))
          .to contain_exactly(include(id: collection1.id))
      end
    end

    context 'with full term' do
      let(:params) { ActionController::Parameters.new(q: 'foo bar') }

      it 'lists collection' do
        expect(service.search(nil, controller))
          .to contain_exactly(include(id: collection1.id))
      end
    end

    context 'with unmatched term' do
      let(:params) { ActionController::Parameters.new(q: 'deadbeef') }

      it 'lists nothing' do
        expect(service.search(nil, controller))
          .to match_array ([])
      end
    end

    # Single-character queries are expected to return no results.
    context 'with too short term' do
      let(:params) { ActionController::Parameters.new(q: 'f') }

      it 'lists nothing' do
        expect(service.search(nil, controller))
          .to match_array ([])
      end
    end

    context 'with no term' do
      let(:params) { ActionController::Parameters.new() }

      it 'lists everything' do
        expect(service.search(nil, controller))
          .to include(include(id: collection1.id), include(id: collection2.id))
      end
    end
  end
end
-
1
require 'rails_helper'


# Specs for ApplicationController request logging, locale handling, and the
# 404 rescue path. Collaborators (request/response/logger) are stubbed with
# OpenStructs rather than driven through the full Rails stack.
RSpec.describe ApplicationController do

  describe "#global_request_logging" do
    before {
      allow(subject).to receive(:request).and_return OpenStruct.new(remote_ip: "1.2.3.4", method: "GET", url: "/", headers: {"HTTP_USER_AGENT" => "agent"})
      allow(subject).to receive(:response).and_return OpenStruct.new(status: "responsive")

      allow(subject.logger).to receive(:info).with "ACCESS: 1.2.3.4, GET /, agent"
      allow(subject.logger).to receive(:info).with "response_status: responsive"
    }
    context "when called with an empty block" do
      it do
        expect(subject.logger).to receive(:info).with "ACCESS: 1.2.3.4, GET /, agent"
        expect(subject.logger).to receive(:info).with "response_status: responsive"

        subject.global_request_logging { }
      end
    end

    context "when called with a block" do
      specify { expect { |b| subject.global_request_logging(&b) }.to yield_control }
      specify { expect { |b| subject.global_request_logging(&b) }.to yield_with_no_args }
    end
  end


  describe "#clear_session_user" do
    context "when called with nil request" do
      it "returns nil_request" do
        skip "Add a test"
      end
    end

    context "when called with request with value" do
      it "clears the session user" do
        skip "Add a test"
      end
    end
  end


  pending "#after_authentication"


  describe "#rescue_404" do
    it "renders a not found response" do
      get :rescue_404
      expect(response.status).to eq 404
    end
  end


  describe "#set_locale" do
    context "when locale param is present" do
      before {
        allow(subject).to receive(:params).and_return "locale" => "italian"
      }

      # NOTE(review): the two examples below contain only `allow` stubs and no
      # expectations, so they assert nothing and always pass — confirm intent.
      context "when constrained_locale has value" do
        before {
          allow(subject).to receive(:constrained_locale).and_return "english"
        }
        it "sets locale param to constrained_locale" do
          allow(subject).to receive(:params).and_return "locale" => "english"
        end
      end

      context "when I18n.default_locale has value" do
        before {
          allow(I18n).to receive(:default_locale).and_return "spanish"
        }
        it "sets locale param to I18n.default_locale" do
          allow(subject).to receive(:params).and_return "locale" => "spanish"
        end
      end
    end

    context "when locale param is not present" do
      it "returns nil" do
        expect(subject.set_locale).to be_blank
      end
    end
  end


  describe "#constrained_locale" do
    context "when locale param in available_translations" do
      skip "Add a test"
    end

    context "when locale not in available_translations" do
      skip "Add a test"
    end
  end

end
-
1
require 'rails_helper'

# Specs for CatalogController. Most of the Blacklight configuration is left
# as pending examples; only the bookmarks-control toggle is asserted.
RSpec.describe CatalogController do

  pending "#self.uploaded_field"

  pending "#self.modified_field"

  pending "configure_blacklight"

  describe '#render_bookmarks_control?' do
    it 'returns false' do
      expect(controller.render_bookmarks_control?).to eq false
    end
  end

end
-
1
require 'rails_helper'

# Smoke spec: the guest-user message page responds successfully.
RSpec.describe GuestUserMessageController do

  describe "#show" do
    it "renders a response" do
      get :show
      expect(response.status).to eq 200
    end
  end


end
-
1
require 'rails_helper'

# Specs for Hyrax::CollectionsController overrides.
# NOTE(review): `let(:subject) { described_class.new }` replaces the implicit
# controller-spec subject with a bare instance; verify the `get` example in
# #display_provenance_log still dispatches through the test controller.
RSpec.describe Hyrax::CollectionsController do
  let(:subject) { described_class.new }
  let(:collection) { FactoryBot.create(:collection_lw) }

  describe "#render_bookmarks_control?" do
    it "returns false" do
      expect(subject.send(:render_bookmarks_control?)).to eq false
    end
  end

  describe "#display_provenance_log" do
    it "redirects" do
      get :display_provenance_log, params: { id: collection.id }
      expect(response).to be_redirect
    end
  end
end
-
1
require 'rails_helper'
-
-
1
# Lightweight stand-in for collection branding info objects used by the
# dashboard collections controller specs: remembers a path and accepts the
# no-op persistence calls the controller makes.
class BannerInfoMock
  # Path supplied at construction time.
  attr_reader :local_path

  def initialize( local_path )
    @local_path = local_path
  end

  # No-op save; mirrors the real object's (path, verify) signature.
  def save( path, verify = true )
    # intentionally empty
  end

  # No-op delete; present so specs can set expectations on it.
  def delete_all
    # intentionally empty
  end
end
-
-
-
1
# Specs for Hyrax::Dashboard::CollectionsController. Collaborators (the
# collection, branding info, params) are stubbed via OpenStruct and
# BannerInfoMock so the controller methods run without Fedora/Solr.
# NOTE(review): `let(:subject) { described_class.new }` shadows the implicit
# controller-spec subject with a bare instance — confirm this is intended.
RSpec.describe Hyrax::Dashboard::CollectionsController do
  let(:subject) { described_class.new }

  include Hyrax::BrandingHelper

  describe 'constants' do
    it do
      expect( Hyrax::Dashboard::CollectionsController::EVENT_NOTE ).to eq 'Hyrax::Dashboard::CollectionsController'
      expect( Hyrax::Dashboard::CollectionsController::PARAMS_KEY ).to eq 'collection'
    end
  end

  describe "#after_create" do
    before {
      allow(subject).to receive(:monkey_after_create)
      allow(subject).to receive(:workflow_create)
    }
    it "calls monkey_after_create and workflow_create" do
      expect(subject).to receive(:monkey_after_create)
      expect(subject).to receive(:workflow_create)

      subject.after_create
    end
  end

  describe "#destroy" do
    before {
      allow(subject).to receive(:workflow_destroy)
      allow(subject).to receive(:monkey_destroy)
    }
    it "calls workflow_destroy and monkey_destroy" do
      expect(subject).to receive(:workflow_destroy)
      expect(subject).to receive(:monkey_destroy)

      subject.destroy
    end
  end

  describe "#show" do
    before {
      allow(subject).to receive(:presenter)
      allow(subject).to receive(:query_collection_members)
    }

    context "when @collection.collection_type.brandable? is true" do

      context "when banner_info not empty" do
        before {
          subject.instance_variable_set(:@collection, OpenStruct.new(id: 63, collection_type: OpenStruct.new( brandable?: true ) ))
          allow(subject).to receive(:collection_banner_info).with(id: 63).and_return ["primo", "secundo"]
          allow(subject).to receive(:brand_path).with(collection_branding_info: "primo")
        }
        it "calls collection_banner_info and brand_path" do
          expect(subject).to receive(:collection_banner_info).with(id: 63)
          expect(subject).to receive(:brand_path)

          subject.show
        end
      end

      context "when banner_info empty" do
        before {
          subject.instance_variable_set(:@collection, OpenStruct.new(id: 36, collection_type: OpenStruct.new( brandable?: true ) ))
          allow(subject).to receive(:collection_banner_info).with(id: 36).and_return []
        }
        it "calls collection_banner_info" do
          expect(subject).to receive(:collection_banner_info).with(id: 36)
          expect(subject).not_to receive(:brand_path)
          subject.show
        end
      end
    end

    context "when @collection.collection_type.brandable? is false" do
      before {
        subject.instance_variable_set(:@collection, OpenStruct.new( collection_type: OpenStruct.new( brandable?: false ) ))
      }
      it "calls presenter and query_collection_members functions" do
        expect(subject).not_to receive(:collection_banner_info)
        expect(subject).not_to receive(:brand_path)
        subject.show
      end
    end

    # Shared post-condition: every #show path drives presenter and member query.
    after {
      expect(subject).to have_received(:presenter)
      expect(subject).to have_received(:query_collection_members)
    }
  end

  describe "#curation_concern" do
    before {
      parameters = { :id => "collectibles" }
      allow(subject).to receive(:params).and_return parameters
      allow(ActiveFedora::Base).to receive(:find).with("collectibles").and_return "extensive"
    }

    context "when @collection has value" do
      before {
        subject.instance_variable_set(:@collection, "collection")
      }

      it "returns @collection" do
        expect(ActiveFedora::Base).not_to receive(:find)

        expect(subject.curation_concern).to eq "collection"
      end
    end

    context "when @collection is nil or false" do
      it "calls Base.find and sets @collection to the result" do
        expect(ActiveFedora::Base).to receive(:find)

        expect(subject.curation_concern).to eq "extensive"

        # NOTE(review): bare `==` comparison below is not an expectation and
        # asserts nothing.
        subject.instance_variable_get(:@collection) == "extensive"
      end
    end
  end

  describe "#default_event_note" do
    it "returns string" do
      expect(subject.default_event_note).to eq 'Hyrax::Dashboard::CollectionsController'
    end
  end

  describe "#params_key" do
    it "returns string" do
      expect(subject.params_key).to eq 'collection'
    end
  end

  describe "#process_banner_input" do
    before {
      allow(subject).to receive(:update_existing_banner).and_return "update: existence"
      allow(subject).to receive(:remove_banner)
    }
    context "when banner_unchanged param true" do
      before {
        parameters = {"banner_unchanged" => "true"}
        allow(subject).to receive(:params).and_return parameters
      }
      it "returns update_existing_banner" do
        expect(subject).not_to receive(:remove_banner)
        expect(subject.process_banner_input).to eq "update: existence"
      end
    end

    context "when banner_unchanged param false" do
      context "when banner_files param true" do
        before {
          parameters = {"banner_unchanged" => "false", "banner_files" => true}
          allow(subject).to receive(:params).and_return parameters
          allow(subject).to receive(:add_new_banner).with(true).and_return "new banner day"
        }
        it "returns update_existing_banner" do
          expect(subject).to receive(:add_new_banner).with(true)

          expect(subject.process_banner_input).to eq "new banner day"
        end
      end

      context "when banner_files param false" do
        before {
          parameters = {"banner_unchanged" => "false", "banner_files" => false}
          allow(subject).to receive(:params).and_return parameters
        }
        it "returns update_existing_banner" do
          expect(subject).not_to receive(:add_new_banner)

          expect(subject.process_banner_input).to be_blank
        end
      end

      # Shared post-condition for the banner_unchanged=false branches.
      after {
        expect(subject).not_to have_received(:update_existing_banner)
        expect(subject).to have_received(:remove_banner)
      }
    end
  end


  describe "#update_existing_banner" do
    banner_info1 = BannerInfoMock.new("path1")
    banner_info2 = BannerInfoMock.new("path2")

    before {
      subject.instance_variable_set(:@collection, OpenStruct.new( id: 3000 ))
      allow(subject).to receive(:collection_banner_info).with(id: 3000).and_return [banner_info1, banner_info2]
    }
    it do
      expect(subject).to receive(:collection_banner_info).with( id: 3000 )
      expect(banner_info1).to receive(:save).with("path1", false)

      subject.update_existing_banner
    end
  end


  describe "#add_new_banner" do
    bannerInfo = BannerInfoMock.new "kawaii"
    before {
      subject.instance_variable_set(:@collection, OpenStruct.new( id: 4000 ))

      allow(subject).to receive(:uploaded_files).with("uploaded file ids").and_return [OpenStruct.new(file_url: "kawaii"), OpenStruct.new(file_url: "banzai")]
      allow(File).to receive(:split).with("kawaii").and_return ["konnichiwa", "sayonara"]
      allow(CollectionBrandingInfo).to receive(:new).with(collection_id: 4000, filename: "sayonara", role: "banner", alt_txt: "", target_url: "")
                                                    .and_return bannerInfo

      allow(bannerInfo).to receive(:save).with("kawaii")
    }
    it do
      expect(subject).to receive(:uploaded_files).with("uploaded file ids")
      expect(CollectionBrandingInfo).to receive(:new).with(collection_id: 4000, filename: "sayonara", role: "banner", alt_txt: "", target_url: "")
      expect(bannerInfo).to receive(:save).with("kawaii")

      subject.add_new_banner("uploaded file ids")
    end
  end


  describe "#remove_banner" do
    bannerInfo = BannerInfoMock.new "a to z"

    before {
      subject.instance_variable_set(:@collection, OpenStruct.new( id: 5000 ))

      allow(subject).to receive(:collection_banner_info).with(id: 5000).and_return bannerInfo
      allow(bannerInfo).to receive(:delete_all)
    }
    it do
      expect(subject).to receive(:collection_banner_info).with(id: 5000)
      expect(bannerInfo).to receive(:delete_all)

      subject.remove_banner
    end
  end

end
-
1
require 'rails_helper'

# Specs for Hyrax::DataSetsController. NOTE(review): every example is
# commented out; only sign-in setup remains, so nothing is exercised.
RSpec.describe Hyrax::DataSetsController do # rubocop:disable RSpec/EmptyExampleGroup

  # before(:all ) do
  #   puts "DataSet ids before=#{DataSet.all.map { |ds| ds.id }}"
  #   #puts "FileSet ids before=#{FileSet.all.map { |fs| fs.id }}"
  # end
  #
  # after(:all ) do
  #   #puts "FileSet ids after=#{FileSet.all.map { |fs| fs.id }}"
  #   puts "DataSet ids after=#{DataSet.all.map { |ds| ds.id }}"
  #   # clean up created DataSet
  #   DataSet.all.each { |ds| ds.delete }
  #   #FileSet.all.each { |fs| fs.delete }
  # end

  include Devise::Test::ControllerHelpers
  routes { Rails.application.routes }
  let(:main_app) { Rails.application.routes.url_helpers }
  let(:hyrax) { Hyrax::Engine.routes.url_helpers }
  let(:user) { create(:user) }

  before do
    sign_in user
  end

  context 'someone elses private work' do # rubocop:disable RSpec/EmptyExampleGroup
    # let(:work) { create(:private_data_set) }
    #
    # it 'shows unauthorized message' do
    #   get :show, params: { id: work }
    #   expect(response.code).to eq '401'
    #   expect(response).to render_template(:unauthorized)
    # end
  end

end
-
1
require 'rails_helper'

# Specs for the Hyrax::DeepblueController feature toggles: each predicate is
# expected to be disabled (false) in the base controller.
RSpec.describe Hyrax::DeepblueController do

  describe "#box_enabled?" do
    it "returns false" do
      expect(subject.box_enabled?).to eq false
    end
  end

  describe "#display_provenance_log_enabled?" do
    it "returns false" do
      expect(subject.display_provenance_log_enabled?).to eq false
    end
  end

  describe "#doi_minting_enabled?" do
    it "returns false" do
      expect(subject.doi_minting_enabled?).to eq false
    end
  end

  describe "#globus_download_enabled?" do
    it "returns false" do
      expect(subject.globus_download_enabled?).to eq false
    end
  end

  describe "#tombstone_enabled?" do
    it "returns false" do
      expect(subject.tombstone_enabled?).to eq false
    end
  end

  describe "#zip_download_enabled?" do
    it "returns false" do
      expect(subject.zip_download_enabled?).to eq false
    end
  end

end
-
# Generated via
#  `rails generate hyrax:work Dissertation`
require 'rails_helper'

# Placeholder spec; the single example is skipped until real tests exist.
RSpec.describe Hyrax::DissertationsController do
  it "has tests" do
    skip "Add your tests here"
  end
end
-
1
require 'rails_helper'

# Specs for the provenance-logging hooks on Hyrax::FileSetsController. The
# curation concern and Deepblue helpers are stubbed, so these verify only
# that each controller hook delegates with the expected arguments.
RSpec.describe Hyrax::FileSetsController do

  describe 'constants' do
    it do
      expect( Hyrax::FileSetsController::PARAMS_KEY ).to eq 'file_set'
    end
  end

  before {
    allow(subject).to receive(:current_user).and_return "user1"
  }

  describe 'provenance_log_create' do
    before {
      allow(subject.curation_concern).to receive(:provenance_create).with(current_user: "user1", event_note: 'FileSetsController')
                                                                    .and_return "provenance log creation"
    }
    it "calls curation_concern.provenance_create" do
      expect(subject.curation_concern).to receive(:provenance_create).with(current_user: "user1", event_note: 'FileSetsController')
      expect(subject.provenance_log_create).to eq "provenance log creation"
    end
  end

  describe 'provenance_log_destroy' do
    before {
      allow(subject.curation_concern).to receive(:provenance_destroy).with(current_user: "user1", event_note: 'FileSetsController')
                                                                     .and_return "provenance log destruction"
    }
    it "calls curation_concern.provenance_destroy" do
      expect(subject.curation_concern).to receive(:provenance_destroy).with(current_user: "user1", event_note: 'FileSetsController')
      expect(subject.provenance_log_destroy).to eq "provenance log destruction"
    end
  end

  describe 'provenance_log_update_after' do
    before {
      subject.instance_variable_set(:@update_attr_key_values, "after key values")

      allow(subject.curation_concern).to receive(:provenance_log_update_after).with(current_user: "user1", update_attr_key_values: "after key values")
                                                                              .and_return "provenance log update after"
    }
    it "calls curation_concern.provenance_log_update_after" do
      expect(subject.curation_concern).to receive(:provenance_log_update_after).with(current_user: "user1", update_attr_key_values: "after key values")
      expect(subject.provenance_log_update_after).to eq "provenance log update after"
    end
  end

  describe 'provenance_log_update_before' do
    # NOTE: could not resolve params[PARAMS_KEY].dup
    before {
      allow(subject.curation_concern).to receive(:provenance_log_update_before).with( anything )
                                                                               .and_return "provenance log update before"
    }
    it "sets instance variable to curation_concern.provenance_log_update_before" do
      expect(subject.curation_concern).to receive(:provenance_log_update_before)

      expect(subject.provenance_log_update_before).to eq "provenance log update before"
      # NOTE(review): bare `==` below is not an expectation and asserts nothing.
      subject.instance_variable_get(:@update_attr_key_values) == "provenance log update before"
    end
  end


  describe "display_provenance_log" do
    curation = OpenStruct.new(id: 300)
    main = "main app"

    before {
      allow(subject).to receive(:curation_concern).and_return curation
      allow(subject).to receive(:main_app).and_return main

      allow(Deepblue::ProvenancePath).to receive(:path_for_reference).with( 300 ).and_return "file path"
      allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["DataSetsController", "display_provenance_log", "file path"]
      allow(Deepblue::ProvenanceLogService).to receive(:entries).with 300, refresh: true

      allow(subject).to receive(:redirect_to).with [main, curation]
    }
    it "calls Deepblue methods and then redirects" do
      expect(subject).to receive(:redirect_to).with [main, curation]

      subject.display_provenance_log
    end
  end


  describe "display_provenance_log_enabled?" do
    it "returns true" do
      expect(subject.display_provenance_log_enabled?).to eq true
    end
  end


  describe "provenance_log_entries_present?" do
    context "provenance_log_entries has a value" do
      it "returns true" do
        skip "Add a test"
      end
    end

    context "provenance_log_entries does not have a value" do
      it "returns false" do
        skip "Add a test"
      end
    end
  end
end
-
# Generated via
#  `rails generate hyrax:work GenericWork`
require 'rails_helper'

# Placeholder spec; the single example is skipped until real tests exist.
RSpec.describe Hyrax::GenericWorksController do
  it "has tests" do
    skip "Add your tests here"
  end
end
-
1
require 'rails_helper'

# Spec for Hyrax::My::CollectionsController: only verifies the
# bookmarks control is disabled.
RSpec.describe Hyrax::My::CollectionsController do
  let(:subject) { described_class.new }
  # NOTE(review): `collection` is declared but unused by the one example.
  let(:collection) { FactoryBot.create(:collection_lw) }

  describe "#render_bookmarks_control?" do
    it "returns false" do
      expect(subject.send(:render_bookmarks_control?)).to eq false
    end
  end
end
-
1
require 'rails_helper'

# Specs for RobotsController: robots.txt content is stored in a ContentBlock
# named 'robots_txt'; #show is public, while #edit/#update are admin-only.
describe RobotsController do
  let(:user) { create(:user) }
  let(:admin) { create(:admin) }
  let(:robots_txt) { ContentBlock.create(name: 'robots_txt', value: content) }
  let(:content) { "User-Agent: *\nDisallow: /concern" }

  after do
    ContentBlock.delete('robots_txt')
  end

  describe '#show' do
    it 'is blank by default' do
      get :show
      expect(response).to be_successful
      expect(response.body).to eq ''
    end

    it 'renders the value' do
      # Reference the lazy let so the ContentBlock exists before the request.
      robots_txt
      get :show
      expect(response).to be_successful
      expect(response.body).to eq content
    end

    it 'is route for /robots.txt', type: :routing do
      expect(get: '/robots.txt').to route_to(controller: 'robots', action: 'show', format: 'txt')
    end
  end

  describe '#edit' do
    it 'is unavailable to the public' do
      get :edit
      expect(response).to redirect_to(new_user_session_path(locale: nil))
    end

    it 'is unavailable to regular users' do
      sign_in user
      get :edit
      expect(response).to be_unauthorized
    end

    context 'with rendering' do
      render_views
      it 'is rendered for admins' do
        robots_txt
        sign_in admin
        get :edit
        expect(response).to be_successful
        expect(response.body).to include(content)
      end
    end
  end

  describe '#update' do
    let(:new_content) { 'Disallow: *' }

    it 'is unavailable to the public' do
      patch :update, params: { content_block: { value: new_content } }
      expect(response).to redirect_to(new_user_session_path(locale: nil))
    end

    it 'is unavailable to regular users' do
      sign_in user
      patch :update, params: { content_block: { value: new_content } }
      expect(response).to be_unauthorized
    end

    it 'is updated for admins' do
      robots_txt
      sign_in admin
      patch :update, params: { content_block: { value: new_content } }
      expect(response).to redirect_to(edit_robots_path)
      get :show
      expect(response.body).to eq new_content
    end
  end
end
-
1
FactoryBot.define do
-
1
factory :collection_type, class: Hyrax::CollectionType do
-
1
sequence(:title) { |n| "Collection Type #{n}" }
-
1
sequence(:machine_id) { |n| "title_#{n}" }
-
-
1
description { 'Collection type with all options' }
-
1
nestable { true }
-
1
discoverable { true }
-
1
sharable { true }
-
1
brandable { true }
-
1
share_applies_to_new_works { true }
-
1
allow_multiple_membership { true }
-
1
require_membership { false }
-
1
assigns_workflow { false }
-
1
assigns_visibility { false }
-
-
1
transient do
-
1
creator_user { nil }
-
1
creator_group { nil }
-
1
manager_user { nil }
-
1
manager_group { nil }
-
end
-
-
1
after(:create) do |collection_type, evaluator|
-
if evaluator.creator_user
-
attributes = { hyrax_collection_type_id: collection_type.id,
-
access: Hyrax::CollectionTypeParticipant::CREATE_ACCESS,
-
agent_id: evaluator.creator_user,
-
agent_type: Hyrax::CollectionTypeParticipant::USER_TYPE }
-
create(:collection_type_participant, attributes)
-
end
-
-
if evaluator.creator_group
-
attributes = { hyrax_collection_type_id: collection_type.id,
-
access: Hyrax::CollectionTypeParticipant::CREATE_ACCESS,
-
agent_id: evaluator.creator_group,
-
agent_type: Hyrax::CollectionTypeParticipant::GROUP_TYPE }
-
create(:collection_type_participant, attributes)
-
end
-
-
if evaluator.manager_user
-
attributes = { hyrax_collection_type_id: collection_type.id,
-
access: Hyrax::CollectionTypeParticipant::MANAGE_ACCESS,
-
agent_id: evaluator.manager_user,
-
agent_type: Hyrax::CollectionTypeParticipant::USER_TYPE }
-
create(:collection_type_participant, attributes)
-
end
-
-
if evaluator.manager_group
-
attributes = { hyrax_collection_type_id: collection_type.id,
-
access: Hyrax::CollectionTypeParticipant::MANAGE_ACCESS,
-
agent_id: evaluator.manager_group,
-
agent_type: Hyrax::CollectionTypeParticipant::GROUP_TYPE }
-
create(:collection_type_participant, attributes)
-
end
-
end
-
-
1
# Collection-type behaviour flags; each setting has a paired on/off trait.

trait :nestable do
  nestable { true }
end

trait :not_nestable do
  nestable { false }
end

trait :discoverable do
  discoverable { true }
end

trait :not_discoverable do
  discoverable { false }
end

trait :brandable do
  brandable { true }
end

trait :not_brandable do
  brandable { false }
end

# Sharing has three states: on (applies to new works), on without new-work
# permissions, and off.
trait :sharable do
  sharable { true }
  share_applies_to_new_works { true }
end

trait :sharable_no_work_permissions do
  sharable { true }
  share_applies_to_new_works { false }
end

trait :not_sharable do
  sharable { false }
  share_applies_to_new_works { false }
end

trait :allow_multiple_membership do
  allow_multiple_membership { true }
end

trait :not_allow_multiple_membership do
  allow_multiple_membership { false }
end
-
end
-
-
1
# Default (User Collection) type; find-or-create so repeated builds share one record.
factory :user_collection_type, class: Hyrax::CollectionType do
  initialize_with { Hyrax::CollectionType.find_or_create_default_collection_type }
end

# Admin Set collection type, likewise found-or-created rather than duplicated.
factory :admin_set_collection_type, class: Hyrax::CollectionType do
  initialize_with { Hyrax::CollectionType.find_or_create_admin_set_type }
end
-
end
-
1
FactoryBot.define do
-
# Tests that create a Fedora Object are very slow. This factory lets you control which parts of the object ecosystem
-
# get built.
-
#
-
# PREFERRED: Use build whenever possible. You can control the creation of the permission template, collection type, and
-
# solr document by passing parameters to the build(:collection_lw) method. That way you can build only the parts
-
# needed for a specific test.
-
#
-
# AVOID: Do not use create unless absolutely necessary. It will create everything including the Fedora object.
-
#
-
# @example Simple build of a collection with no additional parts created. Lightest weight.
-
# NOTE: A user is automatically created as the owner of the collection.
-
# let(:collection) { build(:collection_lw) }
-
#
-
# @example Simple build of a collection with no additional parts created. User is the owner of the collection. Lightest weight.
-
# let(:collection) { build(:collection_lw, user:) }
-
#
-
# @example Simple build of a collection with only solr-document. Owner is given edit-access in solr-document. Light weight.
-
# let(:collection) { build(:collection_lw, with_solr_document: true) }
-
#
-
# @example Simple build of a collection with only a permission template created. Owner is set as a manager. Light weight.
-
# let(:collection) { build(:collection_lw, with_permission_template: true) }
-
#
-
# @example Build a collection with only a permission template created. Permissions are set based on
-
# attributes set for `with_permission_template`. Middle weight.
-
# # permissions passed thru `with_permission_template` can be any of the following in any combination
-
# let(:permissions) { { manage_users: [user.user_key], # multiple users can be listed
-
# deposit_users: [user.user_key],
-
# view_users: [user.user_key],
-
# manage_groups: [group_name], # multiple groups can be listed
-
# deposit_groups: [group_name],
-
# view_groups: [group_name], } }
-
# let(:collection) { build(:collection_lw, user: , with_permission_template: permissions) }
-
#
-
# @example Build a collection with permission template and solr-document created. Permissions are set based on
-
# attributes set for `with_permission_template`. Solr-document includes read/edit access defined based
-
# on attributes passed thru `with_permission_template`. Middle weight.
-
# # permissions passed thru `with_permission_template` can be any of the following in any combination
-
# let(:permissions) { { manage_users: [user.user_key], # multiple users can be listed
-
# deposit_users: [user.user_key],
-
# view_users: [user.user_key],
-
# manage_groups: [group_name], # multiple groups can be listed
-
# deposit_groups: [group_name],
-
# view_groups: [group_name], } }
-
# let(:collection) { build(:collection_lw, user: , with_permission_template: permissions, with_solr_document: true) }
-
#
-
# @example Build a collection generating its collection type with specific settings. Light Weight.
-
# NOTE: Do not use this approach if you need access to the collection type in the test.
-
# DEFAULT: If `collection_type_settings` and `collection_type_gid` are not specified, then the default
-
# User Collection type will be used.
-
# # Any not specified default to ON. At least one setting should be specified.
-
# let(:settings) { [
-
# :nestable, # OR :not_nestable,
-
# :discoverable, # OR :not_discoverable
-
# :brandable, # OR :not_brandable
-
# :sharable, # OR :not_sharable OR :sharable_no_work_permissions
-
# :allow_multiple_membership, # OR :not_allow_multiple_membership
-
# ] }
-
# let(:collection) { build(:collection_lw, collection_type_settings: settings) }
-
#
-
# @example Build a collection using the passed in collection type. Light Weight.
-
# NOTE: Use this approach if you need access to the collection type in the test.
-
# # Any not specified default to ON. At least one setting should be specified.
-
# let(:settings) { [
-
# :nestable, # OR :not_nestable,
-
# :discoverable, # OR :not_discoverable
-
# :brandable, # OR :not_brandable
-
# :sharable, # OR :not_sharable OR :sharable_no_work_permissions
-
# :allow_multiple_membership, # OR :not_allow_multiple_membership
-
# ] }
-
# let(:collection_type) { create(:collection_lw_type, settings) }
-
# let(:collection) { build(:collection_lw, collection_type_gid: collection_type.gid) }
-
#
-
# @example Build a collection with nesting fields set in the solr document. Light weight.
-
# NOTE: The property `with_nesting_attributes` is only supported for building collections. The attributes will
-
# be overwritten by the save process when creating a collection, thus effectively ignoring this property.
-
# let(:collection) { build(:collection_lw, with_nesting_attributes: { ancestors: ['Parent_1'],
-
# parent_ids: ['Parent_1'],
-
# pathnames: ['Parent_1/Collection123'],
-
# depth: 2 }) }
-
#
-
# @example Create a collection with everything. Extreme heavy weight. This is very slow and should be avoided.
-
# NOTE: Everything gets created.
-
# NOTE: Build options affect created collections as follows...
-
# * `with_permission_template` can specify user/group permissions. A permission template is always created.
-
# * `collection_type_settings` can specify to create a collection type with specific settings
-
# * `with_solr_document` is ignored. A solr document is always created.
-
# * `with_nested_attributes` is ignored.
-
# NOTE: Additional process is required for testing nested collections with Fedora objects. See next example.
-
# let(:collection) { create(:collection_lw) }
-
#
-
# @example Create collections for use with nested collection index testing.
-
# NOTE: For light weight nested collection testing using solr documents only, see `with_nested_attributes` above
-
# NOTE: Full indexed nested collections with solr documents and Fedora objects are created by...
-
# * creating multiple collections (expensive)
-
# * nesting them and saving - causing reindex of the Fedora objects (expensive)
-
# For tests of nesting functionality requiring the Fedora object and reindexing, in the test itself
-
# include `:with_nested_reindexing`
-
# it "returns the collection and its members", :with_nested_reindexing do
-
-
1
# Light-weight collection factory. Prefer build over create; transient flags
# control which expensive parts (permission template, solr document, nesting
# attributes) are produced. See the usage examples above.
factory :collection_lw, class: Collection do
  transient do
    user { create(:user) }

    collection_type_settings { nil }
    with_permission_template { false }
    with_nesting_attributes { nil }
    with_solr_document { false }
  end
  sequence(:title) { |n| ["Collection Title #{n}"] }

  after(:build) do |collection, evaluator|
    collection.apply_depositor_metadata(evaluator.user.user_key)

    CollectionLwFactoryHelper.process_collection_type_settings(collection, evaluator)
    CollectionLwFactoryHelper.process_with_permission_template(collection, evaluator)
    CollectionLwFactoryHelper.process_with_solr_document(collection, evaluator)
    CollectionLwFactoryHelper.process_with_nesting_attributes(collection, evaluator)
  end

  before(:create) do |collection, evaluator|
    # force create a permission template if it doesn't exist for the newly created collection
    CollectionLwFactoryHelper.process_with_permission_template(collection, evaluator, true) unless evaluator.with_permission_template
  end

  after(:create) do |collection, _evaluator|
    collection.reset_access_controls!
  end

  factory :public_collection_lw, traits: [:public_lw]

  factory :private_collection_lw do
    visibility { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
  end

  factory :institution_collection_lw do
    visibility { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_AUTHENTICATED }
  end

  factory :named_collection_lw do
    title { ['collection title'] }
    description { ['collection description'] }
  end

  trait :public_lw do
    visibility { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
  end

  trait :private_lw do
    visibility { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
  end

  trait :institution_lw do
    visibility { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_AUTHENTICATED }
  end

  # NOTE(fix): a second, byte-identical `trait :public_lw` definition that
  # appeared here has been removed — the same trait was registered twice in
  # this factory, which is at best redundant and can raise a duplicate-trait
  # error under newer FactoryBot versions.
end
-
-
1
# Collection owned by the transient user, always using the default
# User Collection type.
factory :user_collection_lw, class: Collection do
  transient do
    user { create(:user) }
  end

  sequence(:title) { |n| ["User Collection Title #{n}"] }

  after(:build) do |collection, evaluator|
    collection.apply_depositor_metadata(evaluator.user.user_key)
    collection.collection_type_gid = create(:user_collection_type).gid
  end
end
-
-
1
# Builds a pre-Hyrax 2.1.0 style collection that has no collection type gid:
#   col = build(:typeless_collection, ...)
#   col.save(validate: false)
factory :typeless_collection_lw, class: Collection do
  transient do
    user { create(:user) }
    with_permission_template { false }
    do_save { false }
  end

  sequence(:title) { |n| ["Typeless Collection Title #{n}"] }

  after(:build) do |collection, evaluator|
    collection.apply_depositor_metadata(evaluator.user.user_key)
    # Save without validation (there is no collection type) so a permission
    # template can reference the collection id.
    collection.save(validate: false) if evaluator.do_save || evaluator.with_permission_template
    if evaluator.with_permission_template
      attributes = { source_id: collection.id, manage_users: [evaluator.user] }
      attributes = evaluator.with_permission_template.merge(attributes) if evaluator.with_permission_template.respond_to?(:merge)
      create(:permission_template, attributes) unless Hyrax::PermissionTemplate.find_by(source_id: collection.id)
    end
  end
end
-
-
1
# Helper methods backing the :collection_lw factory's transient options.
class CollectionLwFactoryHelper
  # Fetch the agent list stored under +permission_key+ in the permission
  # template attributes. Returns [] whenever the attributes are not a Hash
  # containing that key (with_permission_template may also be true/false/nil).
  # @return [Array] agent keys for the requested permission
  def self.permission_from_template(permission_template_attributes, permission_key)
    return [] unless permission_template_attributes.is_a?(Hash) &&
                     permission_template_attributes.key?(permission_key)
    permission_template_attributes[permission_key]
  end
  private_class_method :permission_from_template

  # @param permission_template_attributes [Hash] role name => agent keys
  # @param creator_user [String] user key of the user who created the collection
  # @return [Array] manager user keys, always including the creator
  def self.user_managers(permission_template_attributes, creator_user)
    # NOTE(fix): use a non-mutating append. The previous `managers <<
    # creator_user` mutated the array stored inside the caller's attributes
    # hash, so the creator leaked into evaluator.with_permission_template and
    # accumulated across repeated calls.
    permission_from_template(permission_template_attributes, :manage_users) + [creator_user]
  end

  # @param permission_template_attributes [Hash] role name => agent keys
  # @return [Array] group names with manage access
  def self.group_managers(permission_template_attributes)
    permission_from_template(permission_template_attributes, :manage_groups)
  end

  # @param permission_template_attributes [Hash] role name => agent keys
  # @return [Array] user keys with deposit access
  def self.user_depositors(permission_template_attributes)
    permission_from_template(permission_template_attributes, :deposit_users)
  end

  # @param permission_template_attributes [Hash] role name => agent keys
  # @return [Array] group names with deposit access
  def self.group_depositors(permission_template_attributes)
    permission_from_template(permission_template_attributes, :deposit_groups)
  end

  # @param permission_template_attributes [Hash] role name => agent keys
  # @return [Array] user keys with view access
  def self.user_viewers(permission_template_attributes)
    permission_from_template(permission_template_attributes, :view_users)
  end

  # @param permission_template_attributes [Hash] role name => agent keys
  # @return [Array] group names with view access
  def self.group_viewers(permission_template_attributes)
    permission_from_template(permission_template_attributes, :view_groups)
  end

  # Resolves the collection type for the collection being built:
  # * builds a type from collection_type_settings when given (ignores any gid)
  # * keeps a caller-supplied collection_type_gid when present
  # * otherwise falls back to the default User Collection type
  # @param collection [Collection] object being built/created by the factory
  # @param evaluator [Object] transient properties for the current build
  def self.process_collection_type_settings(collection, evaluator)
    if evaluator.collection_type_settings.present?
      collection.collection_type = FactoryBot.create(:collection_type, *evaluator.collection_type_settings)
    elsif collection.collection_type_gid.blank?
      collection.collection_type = FactoryBot.create(:user_collection_type)
    end
  end

  # Creates a permission template (and its accesses) for the collection when
  # requested via with_permission_template, when forced, or when the current
  # example is tagged :with_nested_reindexing. The creator is always added as
  # a manager.
  # @param collection [Collection] object being built/created by the factory
  # @param evaluator [Object] transient properties for the current build
  # @param force [Boolean] if true, create the template even when not requested
  def self.process_with_permission_template(collection, evaluator, force = false)
    return unless force || evaluator.with_permission_template || RSpec.current_example.metadata[:with_nested_reindexing]
    collection.id ||= FactoryBot.generate(:object_id)
    attributes = { source_id: collection.id }
    attributes[:manage_users] = user_managers(evaluator.with_permission_template, evaluator.user)
    attributes = evaluator.with_permission_template.merge(attributes) if evaluator.with_permission_template.respond_to?(:merge)
    FactoryBot.create(:permission_template, attributes) unless Hyrax::PermissionTemplate.find_by(source_id: collection.id)
  end

  # Adds nesting-related solr fields (ancestors, parent_ids, pathnames, depth)
  # for a nestable collection; no-op otherwise.
  # @param collection [Collection] object being built/created by the factory
  # @param evaluator [Object] transient properties for the current build
  def self.process_with_nesting_attributes(collection, evaluator)
    return unless evaluator.with_nesting_attributes.present? && collection.nestable?
    Hyrax::Adapters::NestingIndexAdapter.add_nesting_attributes(
      solr_doc: solr_document_with_permissions(collection, evaluator),
      ancestors: evaluator.with_nesting_attributes[:ancestors],
      parent_ids: evaluator.with_nesting_attributes[:parent_ids],
      pathnames: evaluator.with_nesting_attributes[:pathnames],
      depth: evaluator.with_nesting_attributes[:depth]
    )
  end

  # Writes the collection's solr document (with permission fields applied)
  # to solr, unless nesting processing will create it instead.
  # @param collection [Collection] object being built/created by the factory
  # @param evaluator [Object] transient properties for the current build
  def self.process_with_solr_document(collection, evaluator)
    return unless evaluator.with_solr_document
    return if evaluator.with_nesting_attributes.present? && collection.nestable? # solr doc created there instead
    ActiveFedora::SolrService.add(solr_document_with_permissions(collection, evaluator), commit: true)
  end

  # @param collection [Collection] object being built/created by the factory
  # @param evaluator [Object] transient properties for the current build
  # @return [Hash] the collection's solr document with read/edit permissions applied
  def self.solr_document_with_permissions(collection, evaluator)
    collection.id ||= FactoryBot.generate(:object_id)
    collection.edit_users = user_managers(evaluator.with_permission_template, evaluator.user)
    collection.edit_groups = group_managers(evaluator.with_permission_template)
    collection.read_users = user_viewers(evaluator.with_permission_template) +
                            user_depositors(evaluator.with_permission_template)
    collection.read_groups = group_viewers(evaluator.with_permission_template) +
                             group_depositors(evaluator.with_permission_template)
    collection.to_solr
  end
  private_class_method :solr_document_with_permissions
end
-
end
-
# frozen_string_literal: true
-
-
1
FactoryBot.define do
-
1
factory :data_set, aliases: [:data_set_work], class: ::DataSet do
-
-
1
transient do
  user { create(:user) }
  # Set to true (or a hash of admin-set attributes) to create an admin set.
  with_admin_set { false }
end

# A work reasonably has an admin set, but creating one is expensive, so it is
# only built when with_admin_set is requested.
before(:create) do |work, evaluator|
  next unless evaluator.with_admin_set
  attributes = {}
  attributes[:id] = work.admin_set_id if work.admin_set_id.present?
  attributes = evaluator.with_admin_set.merge(attributes) if evaluator.with_admin_set.respond_to?(:merge)
  work.admin_set_id = create(:admin_set, attributes).id
end

after(:create) do |work, _evaluator|
  # Persist any collection membership added by nested factories.
  work.save! if work.member_of_collections.present?
end

title { ["Test title"] }
visibility { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }

authoremail { "test@umich.edu" }
description { ["This is the description."] }
methodology { "The Methodology" }

after(:build) do |work, evaluator|
  work.apply_depositor_metadata(evaluator.user.user_key)
end
-
-
1
factory :public_data_set, aliases: [:public_data_set_data_set], traits: [:public]

trait :public do
  visibility { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
end

factory :private_data_set do
  # Visibility is already private via the parent :data_set factory.
end

factory :registered_data_set do
  read_groups { ["registered"] }
end

factory :data_set_with_one_file do
  before(:create) do |work, evaluator|
    work.ordered_members << create(:file_set, user: evaluator.user, title: ['A Contained FileSet'], label: 'filename.pdf')
  end
end

factory :data_set_with_files do
  before(:create) do |work, evaluator|
    2.times { work.ordered_members << create(:file_set, user: evaluator.user) }
  end
end

factory :data_set_with_ordered_files do
  before(:create) do |work, evaluator|
    work.ordered_members << create(:file_set, user: evaluator.user)
    # Insert at position 0 so ordering differs from insertion order.
    work.ordered_member_proxies.insert_target_at(0, create(:file_set, user: evaluator.user))
  end
end

factory :data_set_with_one_child do
  before(:create) do |work, evaluator|
    work.ordered_members << create(:work, user: evaluator.user, title: ['A Contained Work'])
  end
end

factory :data_set_with_two_children do
  before(:create) do |work, evaluator|
    work.ordered_members << create(:work, user: evaluator.user, title: ['A Contained Work'], id: "BlahBlah1")
    work.ordered_members << create(:work, user: evaluator.user, title: ['Another Contained Work'], id: "BlahBlah2")
  end
end

factory :data_set_with_representative_file do
  before(:create) do |work, evaluator|
    work.ordered_members << create(:file_set, user: evaluator.user, title: ['A Contained FileSet'])
    work.representative_id = work.members[0].id
  end
end

factory :data_set_with_file_and_data_set do
  before(:create) do |work, evaluator|
    work.ordered_members << create(:file_set, user: evaluator.user)
    work.ordered_members << create(:work, user: evaluator.user)
  end
end
-
-
1
factory :with_embargo_date do
  # Defaults:
  #   let(:work) { create(:embargoed_data_set) }
  # Specific values:
  #   let(:embargo_attributes) do
  #     { embargo_date: Date.tomorrow.to_s,
  #       current_state: Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE,
  #       future_state: Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
  #   end
  #   let(:work) { create(:embargoed_data_set, with_embargo_attributes: embargo_attributes) }

  transient do
    with_embargo_attributes { false }
    embargo_date { Date.tomorrow.to_s }
    current_state { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
    future_state { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
  end

  factory :embargoed_data_set do
    after(:build) do |work, evaluator|
      embargo = evaluator.with_embargo_attributes
      if embargo
        work.apply_embargo(embargo[:embargo_date], embargo[:current_state], embargo[:future_state])
      else
        work.apply_embargo(evaluator.embargo_date, evaluator.current_state, evaluator.future_state)
      end
    end
  end

  factory :embargoed_data_set_with_files do
    after(:build) do |work, evaluator|
      embargo = evaluator.with_embargo_attributes
      if embargo
        work.apply_embargo(embargo[:embargo_date], embargo[:current_state], embargo[:future_state])
      else
        work.apply_embargo(evaluator.embargo_date, evaluator.current_state, evaluator.future_state)
      end
    end
    after(:create) do |work, evaluator|
      2.times { work.ordered_members << create(:file_set, user: evaluator.user) }
    end
  end
end
-
-
1
factory :with_lease_date do
  # Defaults:
  #   let(:work) { create(:leased_data_set) }
  # Specific values:
  #   let(:lease_attributes) do
  #     { lease_date: Date.tomorrow.to_s,
  #       current_state: Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC,
  #       future_state: Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_AUTHENTICATED }
  #   end
  #   let(:work) { create(:leased_data_set, with_lease_attributes: lease_attributes) }

  transient do
    with_lease_attributes { false }
    lease_date { Date.tomorrow.to_s }
    current_state { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
    future_state { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
  end

  factory :leased_data_set do
    after(:build) do |work, evaluator|
      lease = evaluator.with_lease_attributes
      if lease
        work.apply_lease(lease[:lease_date], lease[:current_state], lease[:future_state])
      else
        work.apply_lease(evaluator.lease_date, evaluator.current_state, evaluator.future_state)
      end
    end
  end

  factory :leased_data_set_with_files do
    after(:build) do |work, evaluator|
      lease = evaluator.with_lease_attributes
      if lease
        work.apply_lease(lease[:lease_date], lease[:current_state], lease[:future_state])
      else
        work.apply_lease(evaluator.lease_date, evaluator.current_state, evaluator.future_state)
      end
    end
    after(:create) do |work, evaluator|
      2.times { work.ordered_members << create(:file_set, user: evaluator.user) }
    end
  end
end
-
end
-
-
# Doesn't set up any edit_users
-
1
# A work whose depositor is set but that grants no edit_users.
factory :data_set_without_access, class: DataSet do
  title { ['Test title'] }
  depositor { create(:user).user_key }
end
-
end
-
# frozen_string_literal: true
-
-
1
FactoryBot.define do
  factory :file_set do
    transient do
      user { create(:user) }
      # Optional file content; uploaded to the file set after create.
      content { nil }
    end

    after(:build) do |fs, evaluator|
      fs.apply_depositor_metadata evaluator.user.user_key
    end

    after(:create) do |file, evaluator|
      Hydra::Works::UploadFileToFileSet.call(file, evaluator.content) if evaluator.content
    end

    trait :public do
      read_groups { ["public"] }
    end

    trait :registered do
      read_groups { ["registered"] }
    end

    # A file set attached to a newly created parent work.
    factory :file_with_work do
      after(:build) do |file, _evaluator|
        file.title = ['testfile']
      end
      after(:create) do |file, evaluator|
        Hydra::Works::UploadFileToFileSet.call(file, evaluator.content) if evaluator.content
        create(:work, user: evaluator.user).members << file
      end
    end
  end
end
-
# frozen_string_literal: true
-
# Defines a new sequence
-
1
FactoryBot.define do
  # Unique fake ids: "object_id_1", "object_id_2", ...
  sequence(:object_id) { |n| "object_id_#{n}" }
end
-
1
FactoryBot.define do
  factory :permission_template_access, class: Hyrax::PermissionTemplateAccess do
    # Association: each access row belongs to a permission template.
    permission_template

    # One trait per access level.
    trait :manage do
      access { 'manage' }
    end

    trait :deposit do
      access { 'deposit' }
    end

    trait :view do
      access { 'view' }
    end
  end
end
-
# frozen_string_literal: true
-
-
1
FactoryBot.define do
-
1
factory :permission_template, class: Hyrax::PermissionTemplate do
  # source_id has a unique index (one-to-one with its admin set / collection),
  # so generate a distinct value per template.
  sequence(:source_id) { |n| format("%010d", n) }

  before(:create) do |permission_template, evaluator|
    # Find-or-create the source object so source_id points at a real record.
    resolve_source = lambda do |factory_name, model|
      requested_id = permission_template.source_id
      if requested_id.present?
        begin
          model.find(requested_id)
        rescue ActiveFedora::ObjectNotFoundError
          create(factory_name, id: requested_id)
        end
      else
        create(factory_name)
      end
    end

    if evaluator.with_admin_set
      permission_template.source_id = resolve_source.call(:admin_set, AdminSet).id
    elsif evaluator.with_collection
      permission_template.source_id = resolve_source.call(:collection, Collection).id
    end
  end

  after(:create) do |permission_template, evaluator|
    if evaluator.with_workflows
      Hyrax::Workflow::WorkflowImporter.load_workflow_for(permission_template: permission_template)
      Sipity::Workflow.activate!(permission_template: permission_template,
                                 workflow_id: permission_template.available_workflows.pluck(:id).first)
    end
    if evaluator.with_active_workflow
      workflow = create(:workflow, active: true, permission_template: permission_template)
      create(:workflow_action, workflow: workflow) # a single action that can be taken
    end

    # Grant any requested accesses, one AccessHelper call per role.
    { manage_users: ['user', :manage],
      manage_groups: ['group', :manage],
      deposit_users: ['user', :deposit],
      deposit_groups: ['group', :deposit],
      view_users: ['user', :view],
      view_groups: ['group', :view] }.each do |attribute, (agent_type, access)|
      agents = evaluator.public_send(attribute)
      AccessHelper.create_access(permission_template, agent_type, access, agents) if agents.present?
    end
  end

  transient do
    with_admin_set { false }
    with_collection { false }
    with_workflows { false }
    with_active_workflow { false }
    manage_users { nil }
    manage_groups { nil }
    deposit_users { nil }
    deposit_groups { nil }
    view_users { nil }
    view_groups { nil }
  end
end
-
-
1
# Creates Hyrax::PermissionTemplateAccess rows for a list of agents.
class AccessHelper
  # @param permission_template [Hyrax::PermissionTemplate] template the accesses
  #   belong to. (Renamed from `permission_template_id`: callers pass the
  #   template object itself, which is forwarded as the permission_template
  #   association, never an id.)
  # @param agent_type [String] 'user' or 'group'
  # @param access [Symbol] :manage, :deposit, or :view (a
  #   :permission_template_access factory trait)
  # @param agent_ids [Array] user keys or group names to grant access to
  def self.create_access(permission_template, agent_type, access, agent_ids)
    agent_ids.each do |agent_id|
      FactoryBot.create(:permission_template_access,
                        access,
                        permission_template: permission_template,
                        agent_type: agent_type,
                        agent_id: agent_id)
    end
  end
end
-
end
-
# frozen_string_literal: true
-
-
1
FactoryBot.define do
-
1
factory :user do
  sequence(:email) { |n| "user#{n}@example.com" }
  encrypted_password { 'password' }

  transient do
    # Custom groups for the stubbed group service.
    # @example create(:user, groups: 'avacado')
    groups { [] }
  end

  # TODO: Register the groups for the given user key so specs can drop stubs
  # like `allow(::User.group_service).to receive(:byname).and_return(...)`.
  after(:build) do |user, evaluator|
    stubbed_groups = Array.wrap(evaluator.groups)
    # Stub the instance itself (covers the not-yet-persisted case)...
    ::RSpec::Mocks.allow_message(user, :groups).and_return(stubbed_groups)
    # ...and the class-level group service: keep original behaviour generally,
    # but return the custom groups for this user so both the created object and
    # a re-found copy behave consistently.
    ::RSpec::Mocks.allow_message(user.class.group_service, :fetch_groups).and_call_original
    ::RSpec::Mocks.allow_message(user.class.group_service, :fetch_groups).with(user: user).and_return(stubbed_groups)
  end

  factory :admin do
    roles { [Role.where(name: 'admin').first_or_create] }
  end

  factory :user_with_mail do
    after(:create) do |user|
      # Examples of single-file success and failure messages.
      # NOTE(review): the message text hard-codes "File 1" on every iteration —
      # looks like a copy/paste artifact; confirm whether it should interpolate
      # the loop number before changing it.
      (1..10).each do |number|
        file = MockFile.new(number.to_s, "Single File #{number}")
        User.batch_user.send_message(user, 'File 1 could not be updated. You do not have sufficient privileges to edit it.', file.to_s, false)
        User.batch_user.send_message(user, 'File 1 has been saved', file.to_s, false)
      end

      # Examples of multiple-file success and failure messages.
      # NOTE(review): `files` is built but never referenced afterwards.
      files = (1..50).map { |number| MockFile.new(number.to_s, "File #{number}") }
      User.batch_user.send_message(user, 'These files could not be updated. You do not have sufficient privileges to edit them.', 'Batch upload permission denied', false)
      User.batch_user.send_message(user, 'These files have been saved', 'Batch upload complete', false)
    end
  end
end

trait :guest do
  guest { true }
end
-
end
-
-
1
# Lightweight stand-in for a file object in mailbox-message specs: exposes an
# id and a fixed string via #to_s.
class MockFile
  attr_accessor :to_s, :id

  # @param id [String] identifier for the mock file
  # @param string [String] display string returned by #to_s
  def initialize(id, string)
    @id = id
    @to_s = string
  end
end
-
1
FactoryBot.define do
-
1
factory :workflow, class: Sipity::Workflow do
-
1
sequence(:name) { |n| "generic_work-#{n}" }
-
1
permission_template
-
end
-
end
-
1
require 'rails_helper'
-
1
include Warden::Test::Helpers
-
-
# NOTE: If you generated more than one work, you have to set "js: true"
-
1
RSpec.feature 'Create a DataSet', js: true do
-
-
# before(:all ) do
-
# puts "DataSet ids before=#{DataSet.all.map { |ds| ds.id }}"
-
# #puts "FileSet ids before=#{FileSet.all.map { |fs| fs.id }}"
-
# end
-
#
-
# after(:all ) do
-
# #puts "FileSet ids after=#{FileSet.all.map { |fs| fs.id }}"
-
# puts "DataSet ids after=#{DataSet.all.map { |ds| ds.id }}"
-
# # clean up created DataSet
-
# DataSet.all.each { |ds| ds.delete }
-
# #FileSet.all.each { |fs| fs.delete }
-
# end
-
-
1
context 'a logged in user' do
-
1
let(:user_attributes) do
-
{ email: 'test@example.com' }
-
end
-
1
let(:user) do
-
User.new(user_attributes) { |u| u.save(validate: false) }
-
end
-
2
let(:admin_set_id) { AdminSet.find_or_create_default_admin_set_id }
-
2
let(:permission_template) { Hyrax::PermissionTemplate.find_or_create_by!(source_id: admin_set_id) }
-
2
let(:workflow) { Sipity::Workflow.create!(active: true, name: 'test-workflow', permission_template: permission_template) }
-
-
1
before do
-
# Create a single action that can be taken
-
1
Sipity::WorkflowAction.create!(name: 'submit', workflow: workflow)
-
-
# Grant the user access to deposit into the admin set.
-
Hyrax::PermissionTemplateAccess.create!(
-
permission_template_id: permission_template.id,
-
agent_type: 'user',
-
agent_id: user.user_key,
-
access: 'deposit'
-
)
-
login_as user
-
end
-
-
1
scenario do
-
# visit '/dashboard'
-
# click_link "Works"
-
# expect(page).to have_content "Add new work"
-
# click_link "Add new work"
-
#
-
# # If you generate more than one work uncomment these lines
-
# choose "payload_concern", option: "DataSet"
-
# click_button "Create work"
-
# sleep 2 # seems to make this work
-
# expect(page).to have_content "Add New Data Set"
-
# click_link "Files" # switch tab
-
# expect(page).to have_content "Add files"
-
# expect(page).to have_content "Add folder"
-
# within('span#addfiles') do
-
# attach_file("files[]", "#{Hyrax::Engine.root}/spec/fixtures/image.jp2", visible: false)
-
# attach_file("files[]", "#{Hyrax::Engine.root}/spec/fixtures/jp2_fits.xml", visible: false)
-
# end
-
# click_link "Descriptions" # switch tab
-
# fill_in('Title', with: 'My Test Work')
-
# fill_in('Creator', with: 'Doe, Jane')
-
# fill_in('Authoremail', with: 'test@test.com' )
-
# fill_in('Keyword', with: 'testing')
-
# fill_in('Abstract or Summary', with: 'This is the description.' )
-
# select('In Copyright', from: 'Rights statement')
-
#
-
# # With selenium and the chrome driver, focus remains on the
-
# # select box. Click outside the box so the next line can't find
-
# # its element
-
# find('body').click
-
# choose('data_set_visibility_open')
-
# expect(page).to have_content('Please note, making something visible to the world (i.e. marking this as Public) may be viewed as publishing which could impact your ability to')
-
#
-
# # the upload of files fails with:
-
# # 2018-05-04 12:02:27 -0400: Rack app error handling request { POST /uploads/ }
-
# # #<ActiveRecord::StatementInvalid: SQLite3::BusyException: database is locked: INSERT INTO "uploaded_files" ("file", "user_id", "created_at", "updated_at") VALUES (?, ?, ?, ?)>
-
# # check('agreement')
-
# # click_on('Save')
-
# # expect(page).to have_content('My Test Work')
-
# # expect(page).to have_content "Your files are being processed by Hyrax in the background."
-
end
-
end
-
-
end
-
# Generated via
-
# `rails generate hyrax:work Dissertation`
-
1
require 'rails_helper'
-
1
include Warden::Test::Helpers
-
-
# NOTE: If you generated more than one work, you have to set "js: true"
-
1
RSpec.feature 'Create a Dissertation', js: true do
-
-
1
context 'a logged in user' do
-
1
let(:user_attributes) do
-
{ email: 'test@example.com' }
-
end
-
1
let(:user) do
-
User.new(user_attributes) { |u| u.save(validate: false) }
-
end
-
2
let(:admin_set_id) { AdminSet.find_or_create_default_admin_set_id }
-
2
let(:permission_template) { Hyrax::PermissionTemplate.find_or_create_by!(source_id: admin_set_id) }
-
2
let(:workflow) { Sipity::Workflow.create!(active: true, name: 'test-workflow', permission_template: permission_template) }
-
-
1
before do
-
# Create a single action that can be taken
-
1
Sipity::WorkflowAction.create!(name: 'submit', workflow: workflow)
-
-
# Grant the user access to deposit into the admin set.
-
Hyrax::PermissionTemplateAccess.create!(
-
permission_template_id: permission_template.id,
-
agent_type: 'user',
-
agent_id: user.user_key,
-
access: 'deposit'
-
)
-
login_as user
-
end
-
-
1
scenario do
-
# visit '/dashboard'
-
# click_link "Works"
-
# click_link "Add new work"
-
#
-
# # If you generate more than one work uncomment these lines
-
# choose "payload_concern", option: "Dissertation"
-
# click_button "Create work"
-
# sleep 2 # seems to make this work
-
# expect(page).to have_content "Add New Dissertation"
-
# click_link "Files" # switch tab
-
# expect(page).to have_content "Add files"
-
# expect(page).to have_content "Add folder"
-
# within('span#addfiles') do
-
# attach_file("files[]", "#{Hyrax::Engine.root}/spec/fixtures/image.jp2", visible: false)
-
# attach_file("files[]", "#{Hyrax::Engine.root}/spec/fixtures/jp2_fits.xml", visible: false)
-
# end
-
# click_link "Descriptions" # switch tab
-
# fill_in('Title', with: 'My Test Work')
-
# fill_in('Creator', with: 'Doe, Jane')
-
# #fill_in('Keyword', with: 'testing')
-
# select('In Copyright', from: 'Rights statement')
-
#
-
# # With selenium and the chrome driver, focus remains on the
-
# # select box. Click outside the box so the next line can't find
-
# # its element
-
# find('body').click
-
# choose('dissertation_visibility_open')
-
# expect(page).to have_content('Please note, making something visible to the world (i.e. marking this as Public) may be viewed as publishing which could impact your ability to')
-
#
-
# # the upload of files fails with:
-
# # 2018-05-04 12:04:05 -0400: Rack app error handling request { POST /uploads/ }
-
# # #<ActiveRecord::StatementInvalid: SQLite3::BusyException: database is locked: INSERT INTO "uploaded_files" ("file", "user_id", "created_at", "updated_at") VALUES (?, ?, ?, ?)>
-
# # check('agreement')
-
# # click_on('Save')
-
# # expect(page).to have_content('My Test Work')
-
# # expect(page).to have_content "Your files are being processed by Hyrax in the background."
-
end
-
end
-
-
end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
require 'rails_helper'
-
1
include Warden::Test::Helpers
-
-
# NOTE: If you generated more than one work, you have to set "js: true"
-
1
RSpec.feature 'Create a GenericWork', js: false do
-
1
context 'a logged in user' do
-
1
let(:user_attributes) do
-
{ email: 'test@example.com' }
-
end
-
1
let(:user) do
-
User.new(user_attributes) { |u| u.save(validate: false) }
-
end
-
2
let(:admin_set_id) { AdminSet.find_or_create_default_admin_set_id }
-
2
let(:permission_template) { Hyrax::PermissionTemplate.find_or_create_by!(source_id: admin_set_id) }
-
2
let(:workflow) { Sipity::Workflow.create!(active: true, name: 'test-workflow', permission_template: permission_template) }
-
-
1
before do
-
# Create a single action that can be taken
-
1
Sipity::WorkflowAction.create!(name: 'submit', workflow: workflow)
-
-
# Grant the user access to deposit into the admin set.
-
Hyrax::PermissionTemplateAccess.create!(
-
permission_template_id: permission_template.id,
-
agent_type: 'user',
-
agent_id: user.user_key,
-
access: 'deposit'
-
)
-
login_as user
-
end
-
-
1
scenario do
-
# # comment out until we can figure out why this error is happening
-
# # ActiveRecord::StatementInvalid:
-
# # SQLite3::BusyException: database is locked: UPDATE "users" SET "sign_in_count" = ?, "current_sign_in_at" = ?, "last_sign_in_at" = ?, "current_sign_in_ip" = ?, "last_sign_in_ip" = ?, "updated_at" = ? WHERE "users"."id" = ?
-
# visit '/dashboard'
-
# click_link "Works"
-
# click_link "Add new work"
-
#
-
# # If you generate more than one work uncomment these lines
-
# # choose "payload_concern", option: "GenericWork"
-
# # click_button "Create work"
-
#
-
# expect(page).to have_content "Add New Generic work"
-
# click_link "Files" # switch tab
-
# expect(page).to have_content "Add files"
-
# expect(page).to have_content "Add folder"
-
# within('span#addfiles') do
-
# attach_file("files[]", "#{Hyrax::Engine.root}/spec/fixtures/image.jp2", visible: false)
-
# attach_file("files[]", "#{Hyrax::Engine.root}/spec/fixtures/jp2_fits.xml", visible: false)
-
# end
-
# click_link "Descriptions" # switch tab
-
# fill_in('Title', with: 'My Test Work')
-
# fill_in('Creator', with: 'Doe, Jane')
-
# fill_in('Keyword', with: 'testing')
-
# select('In Copyright', from: 'Rights statement')
-
#
-
# # With selenium and the chrome driver, focus remains on the
-
# # select box. Click outside the box so the next line can't find
-
# # its element
-
# find('body').click
-
# choose('generic_work_visibility_open')
-
# expect(page).to have_content('Please note, making something visible to the world (i.e. marking this as Public) may be viewed as publishing which could impact your ability to')
-
# check('agreement')
-
#
-
# click_on('Save')
-
# expect(page).to have_content('My Test Work')
-
# expect(page).to have_content "Your files are being processed by Hyrax in the background."
-
end
-
end
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
-
1
describe Hyrax::Forms::CollectionForm do
-
-
1
describe "#terms" do
-
2
subject { described_class.terms }
-
-
2
it { is_expected.to eq %i[
-
authoremail
-
based_near
-
collection_type_gid
-
contributor
-
creator
-
date_coverage
-
date_created
-
description
-
fundedby
-
grantnumber
-
identifier
-
keyword
-
language
-
license
-
methodology
-
publisher
-
referenced_by
-
related_url
-
representative_id
-
resource_type
-
rights_license
-
subject
-
subject_discipline
-
thumbnail_id
-
title
-
visibility
-
] }
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
-
1
RSpec.describe Hyrax::DataSetForm do
-
-
# before(:all ) do
-
# puts "DataSet ids before=#{DataSet.all.map { |ds| ds.id }}"
-
# puts "FileSet ids before=#{FileSet.all.map { |fs| fs.id }}"
-
# end
-
#
-
# after(:all ) do
-
# puts "FileSet ids after=#{FileSet.all.map { |fs| fs.id }}"
-
# puts "DataSet ids after=#{DataSet.all.map { |ds| ds.id }}"
-
# # clean up created DataSet
-
# DataSet.all.each { |ds| ds.delete }
-
# FileSet.all.each { |fs| fs.delete }
-
# end
-
-
6
subject { form }
-
15
let(:work) { DataSet.new }
-
15
let(:user) { create(:user) }
-
15
let(:ability) { Ability.new(user) }
-
15
let(:form) { described_class.new(work, ability, nil) }
-
-
3
let( :expected_required_fields ) { %i[
-
title
-
creator
-
authoremail
-
methodology
-
description
-
rights_license
-
resource_type
-
description_abstract
-
publisher
-
] }
-
3
let( :expected_primary_terms ) { %i[
-
title
-
creator
-
authoremail
-
methodology
-
resource_type
-
description_abstract
-
description
-
publisher
-
date_coverage
-
rights_license
-
rights_license_other
-
license_other
-
fundedby
-
fundedby_other
-
keyword
-
language
-
referenced_by
-
geo_location_place
-
geo_location_box
-
] }
-
-
1
describe '#data_set?' do
-
1
it 'returns true' do
-
1
expect(form.data_set?).to eq true
-
end
-
end
-
-
1
describe "#required_fields" do
-
2
subject { form.required_fields }
-
-
2
it { is_expected.to eq expected_required_fields }
-
end
-
-
1
describe "#primary_terms" do
-
2
subject { form.primary_terms }
-
1
it do
-
1
is_expected.to eq expected_primary_terms
-
end
-
end
-
-
1
describe "#secondary_terms" do
-
2
subject { form.secondary_terms }
-
-
1
it do
-
1
is_expected.not_to include( :title,
-
:creator,
-
:keyword,
-
:visibilty,
-
:visibility_during_embargo,
-
:embargo_release_date,
-
:visibility_after_embargo,
-
:visibility_during_lease,
-
:lease_expiration_date,
-
:visibility_arights_statementfter_lease,
-
:collection_ids,
-
:additional_information )
-
1
is_expected.not_to include( *expected_required_fields )
-
1
is_expected.not_to include( *expected_primary_terms )
-
# is_expected.to include( :additional_information )
-
end
-
end
-
-
1
describe "#[]" do
-
3
subject { form[term] }
-
-
1
context "for member_of_collection_ids" do
-
3
let(:term) { :member_of_collection_ids }
-
-
2
it { is_expected.to be_empty }
-
-
1
context "when the model has collection ids" do
-
1
before do
-
1
allow(work).to receive(:member_of_collection_ids).and_return(['col1', 'col2'])
-
end
-
# This allows the edit form to show collections the work is already a member of.
-
2
it { is_expected.to eq ['col1', 'col2'] }
-
end
-
end
-
end
-
-
1
describe '.model_attributes' do # rubocop:disable RSpec/EmptyExampleGroup
-
# let(:permission_template) { create(:permission_template, source_id: source_id) }
-
# let!(:workflow) { create(:workflow, active: true, permission_template_id: permission_template.id) }
-
# let(:source_id) { '123' }
-
# let(:file_set) { create(:file_set) }
-
# let(:params) do
-
# ActionController::Parameters.new(
-
# title: ['foo'],
-
# description: [''],
-
# visibility: 'open',
-
# source_id: source_id,
-
# representative_id: '456',
-
# rendering_ids: [file_set.id],
-
# thumbnail_id: '789',
-
# keyword: ['derp'],
-
# license: ['http://creativecommons.org/licenses/by/3.0/us/'],
-
# member_of_collection_ids: ['123456', 'abcdef']
-
# )
-
# end
-
#
-
# subject { described_class.model_attributes(params) }
-
#
-
# it 'permits parameters' do
-
# expect(subject['title']).to eq ['foo']
-
# expect(subject['description']).to be_empty
-
# expect(subject['visibility']).to eq 'open'
-
# expect(subject['license']).to eq ['http://creativecommons.org/licenses/by/3.0/us/']
-
# expect(subject['keyword']).to eq ['derp']
-
# expect(subject['member_of_collection_ids']).to eq ['123456', 'abcdef']
-
# expect(subject['rendering_ids']).to eq [file_set.id]
-
# end
-
#
-
# context '.model_attributes' do
-
# let(:params) do
-
# ActionController::Parameters.new(
-
# title: [''],
-
# description: [''],
-
# keyword: [''],
-
# license: [''],
-
# member_of_collection_ids: [''],
-
# on_behalf_of: 'Melissa'
-
# )
-
# end
-
#
-
# it 'removes blank parameters' do
-
# expect(subject['title']).to be_empty
-
# expect(subject['description']).to be_empty
-
# expect(subject['license']).to be_empty
-
# expect(subject['keyword']).to be_empty
-
# expect(subject['member_of_collection_ids']).to be_empty
-
# expect(subject['on_behalf_of']).to eq 'Melissa'
-
# end
-
# end
-
end
-
-
1
describe "#visibility" do
-
2
subject { form.visibility }
-
-
2
it { is_expected.to eq 'open' }
-
end
-
-
2
it { is_expected.to delegate_method(:on_behalf_of).to(:model) }
-
2
it { is_expected.to delegate_method(:depositor).to(:model) }
-
2
it { is_expected.to delegate_method(:permissions).to(:model) }
-
-
1
describe "#agreement_accepted" do
-
2
subject { form.agreement_accepted }
-
-
2
it { is_expected.to eq false }
-
end
-
-
1
context "on a work already saved" do
-
2
before { allow(work).to receive(:new_record?).and_return(false) }
-
1
it "defaults deposit agreement to true" do
-
1
expect(form.agreement_accepted).to eq(true)
-
end
-
end
-
-
-
1
describe "#merge_date_coverage_attributes!" do
-
1
before {
-
2
subject.instance_variable_set(:@attributes, { "three" => "3", "four" => "4" })
-
}
-
-
1
context "when called with hash argument" do
-
1
it "merges with @attributes" do
-
1
subject.merge_date_coverage_attributes! Hash.new( :one => "1", :two => "2" )
-
-
1
subject.instance_variable_get(:@attributes) == Hash.new( "one" => "1", "two" => "2", "three" => "3", "four" => "4" )
-
end
-
end
-
-
1
context "when called with empty hash" do
-
1
it "keeps the same @attributes" do
-
1
subject.merge_date_coverage_attributes! Hash.new{ }
-
-
1
subject.instance_variable_get(:@attributes) == Hash.new( "three" => "3", "four" => "4" )
-
end
-
end
-
end
-
-
end
-
# Generated via
-
# `rails generate hyrax:work Dissertation`
-
1
require 'rails_helper'
-
-
1
RSpec.describe Hyrax::DissertationForm do
-
1
it "has tests" do
-
1
skip "Add your tests here"
-
end
-
end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
require 'rails_helper'
-
-
1
RSpec.describe Hyrax::GenericWorkForm do
-
1
it "has tests" do
-
1
skip "Add your tests here"
-
end
-
end
-
# frozen_string_literal: true
-
-
1
class CurationConcernMock
-
-
end
-
-
1
class CurationConcernWithProvLoggingMock
-
1
include ::Deepblue::ProvenanceBehavior
-
end
-
-
1
RSpec.describe Deepblue::EventHelper, type: :helper do
-
-
7
let( :user ) { 'user@umich.edu' }
-
-
1
describe '.after_batch_create_failure_callback' do
-
2
let( :event_name ) { 'after_batch_create_failure' }
-
2
let( :arrow_line ) { ">>>>> #{event_name} >>>>>" }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::EventHelper.after_batch_create_failure_callback( user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
-
1
describe '.after_batch_create_success_callback' do
-
2
let( :event_name ) { 'after_batch_create_success' }
-
2
let( :arrow_line ) { ">>>>> #{event_name} >>>>>" }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::EventHelper.after_batch_create_success_callback( user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
-
1
describe '.after_create_concern_callback' do
-
3
let( :event_name ) { 'after_create_concern' }
-
3
let( :arrow_line ) { ">>>>> #{event_name} >>>>>" }
-
1
context 'curation_concern concern responds to provenance_create' do
-
2
let( :curation_concern ) { CurationConcernWithProvLoggingMock.new }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
1
allow( curation_concern ).to receive( :provenance_create ).with( current_user: user, event_note: event_name )
-
end
-
1
it do
-
1
Deepblue::EventHelper.after_create_concern_callback( curation_concern: curation_concern, user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user} >>>>> #{curation_concern}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
1
context 'curation_concern concern does not respond to provenance_create' do
-
2
let( :curation_concern ) { CurationConcernMock.new }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::EventHelper.after_create_concern_callback( curation_concern: curation_concern, user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user} >>>>> #{curation_concern}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
end
-
-
1
describe '.after_create_fileset_callback' do
-
3
let( :event_name ) { 'after_create_fileset' }
-
3
let( :arrow_line ) { ">>>>> #{event_name} >>>>>" }
-
1
context 'file_set responds to provenance_create' do
-
2
let( :file_set ) { CurationConcernWithProvLoggingMock.new }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
1
allow( file_set ).to receive( :provenance_create ).with( current_user: user, event_note: event_name )
-
end
-
1
it do
-
1
Deepblue::EventHelper.after_create_fileset_callback( file_set: file_set, user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user} >>>>> #{file_set}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
1
context 'file_set does not respond to provenance_create' do
-
2
let( :file_set ) { CurationConcernMock.new }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::EventHelper.after_create_fileset_callback( file_set: file_set, user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user} >>>>> #{file_set}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
RSpec.describe Deepblue::LoggingHelper, type: :helper do
-
-
1
describe '.bold_debug' do
-
5
let( :arrow_line ) { ">>>>>>>>>>" }
-
3
let( :msg ) { 'The message.' }
-
-
1
context 'with msg' do
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::LoggingHelper.bold_debug( msg )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 2 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( msg )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 3 ).times
-
end
-
end
-
-
1
context 'with msg and lines: 2' do
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::LoggingHelper.bold_debug( msg, lines: 2 )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( msg )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
-
1
context 'with msg as array' do
-
2
let( :msg_line_1 ) { "line 1" }
-
2
let( :msg_line_2 ) { "line 2" }
-
2
let( :msg_array ) { [ msg_line_1, msg_line_2 ] }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::LoggingHelper.bold_debug( msg_array )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 2 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( msg_line_1 )
-
1
expect( Rails.logger ).to have_received( :debug ).with( msg_line_1 )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 4 ).times
-
end
-
end
-
-
1
context 'with msg as hash' do
-
2
let( :msg_key1 ) { :key1 }
-
2
let( :msg_key2 ) { :key2 }
-
2
let( :msg_value_1 ) { "value 1" }
-
2
let( :msg_value_2 ) { "value 2" }
-
2
let( :msg_hash ) { [ msg_key1 => msg_value_1, msg_key2 => msg_value_2 ] }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
Deepblue::LoggingHelper.bold_debug( msg_hash )
-
1
expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 2 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{msg_key1}: #{msg_value_1}" )
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{msg_key2}: #{msg_value_2}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 4 ).times
-
end
-
end
-
-
# context 'with block msg' do
-
# let( :block_msg ) { 'The block message.' }
-
# before do
-
# allow( Rails.logger ).to receive( :debug ).with( any_args )
-
# end
-
# it do
-
# Deepblue::LoggingHelper.bold_debug( lines: 2 ) { block_msg }
-
# expect( Rails.logger ).to have_received( :debug ).with( arrow_line ).exactly( 4 ).times
-
# expect( Rails.logger ).to have_received( :debug ).with( block_msg )
-
# expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
# end
-
# end
-
end
-
-
1
describe '.initialize_key_values' do
-
4
let( :event_note ) { 'the_event_note' }
-
6
let( :user_email ) { 'user@email.com' }
-
-
1
context 'parameters: user_email and event_note' do
-
1
subject do
-
3
lambda do |user_email, event_note|
-
3
Deepblue::LoggingHelper.initialize_key_values( user_email: user_email, event_note: event_note )
-
end
-
end
-
-
2
let( :result_both ) { { user_email: user_email, event_note: event_note } }
-
3
let( :result_no_event_note ) { { user_email: user_email } }
-
-
2
it { expect( subject.call( user_email, event_note ) ).to eq result_both }
-
2
it { expect( subject.call( user_email, '' ) ).to eq result_no_event_note }
-
2
it { expect( subject.call( user_email, nil ) ).to eq result_no_event_note }
-
end
-
-
1
context 'parameters: user_email, event_note and added' do
-
3
let( :added1 ) { 'one' }
-
2
let( :added2 ) { 'two' }
-
-
2
let( :result1 ) { { user_email: user_email, event_note: event_note, added1: added1 } }
-
2
let( :result2 ) { { user_email: user_email, event_note: event_note, added1: added1, added2: added2 } }
-
-
1
it 'returns a hash containing user_email, event_note, and added1' do
-
1
expect( Deepblue::LoggingHelper.initialize_key_values( user_email: user_email,
-
event_note: event_note,
-
added1: added1 ) ).to eq result1
-
end
-
-
1
it 'returns a hash containing user_email, event_note, added1, and added2' do
-
1
expect( Deepblue::LoggingHelper.initialize_key_values( user_email: user_email,
-
event_note: event_note,
-
added1: added1,
-
added2: added2 ) ).to eq result2
-
end
-
end
-
-
end
-
-
1
describe '.msg_to_log' do
-
3
let( :class_name ) { 'DataSet' }
-
3
let( :event ) { 'the_event' }
-
2
let( :event_note ) { 'the_event_note' }
-
2
let( :blank_event_note ) { '' }
-
3
let( :id ) { 'id1234' }
-
3
let( :timestamp ) { Time.now.to_formatted_s(:db ) }
-
3
let( :time_zone ) { DateTime.now.zone }
-
-
1
context 'parms without added' do
-
2
let( :key_values ) { { event: event,
-
event_note: event_note,
-
timestamp: timestamp,
-
time_zone: time_zone,
-
class_name: class_name,
-
id: id } }
-
2
let( :json ) { ActiveSupport::JSON.encode key_values }
-
2
let( :result1 ) { "#{timestamp} #{event}/#{event_note}/#{class_name}/#{id} #{json}" }
-
1
it do
-
1
expect( Deepblue::LoggingHelper.msg_to_log( class_name: class_name,
-
event: event,
-
event_note: event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone ) ).to eq result1
-
end
-
end
-
-
1
context 'parms, blank event_note, without added' do
-
2
let( :key_values ) { { event: event, timestamp: timestamp, time_zone: time_zone, class_name: class_name, id: id } }
-
2
let( :json ) { ActiveSupport::JSON.encode key_values }
-
2
let( :result1 ) { "#{timestamp} #{event}//#{class_name}/#{id} #{json}" }
-
1
it do
-
1
expect( Deepblue::LoggingHelper.msg_to_log( class_name: class_name,
-
event: event,
-
event_note: blank_event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone ) ).to eq result1
-
end
-
end
-
-
end
-
-
1
describe '.system_as_current_user' do
-
2
subject { Deepblue::LoggingHelper.system_as_current_user }
-
2
it { expect( subject ).to eq 'Deepblue' }
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
RSpec.describe Deepblue::ProvenanceHelper, type: :helper do
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( Deepblue::JsonLoggerHelper::TIMESTAMP_FORMAT ).to eq '\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d'
-
1
expect( Deepblue::JsonLoggerHelper::RE_TIMESTAMP_FORMAT.source ).to eq '^\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d$'
-
1
expect( Deepblue::JsonLoggerHelper::RE_LOG_LINE.source ).to \
-
eq '^(\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d) ([^/]+)/([^/]*)/([^/]+)/([^/ ]*) (.*)$'
-
1
expect( Deepblue::JsonLoggerHelper::PREFIX_UPDATE_ATTRIBUTE ).to eq 'UpdateAttribute_'
-
end
-
end
-
-
1
describe '.echo_to_rails_logger' do
-
2
subject { Deepblue::ProvenanceHelper.echo_to_rails_logger }
-
2
it { expect( subject ).to eq true }
-
end
-
-
1
describe '.form_params_to_update_attribute_key_values' do
-
4
let( :authoremail ) { 'authoremail@umich.edu' }
-
4
let( :creator ) { 'Creator, A' }
-
1
let( :current_user ) { 'user@umich.edu' }
-
4
let( :date_created ) { '2018-02-28' }
-
4
let( :depositor ) { authoremail }
-
4
let( :description ) { 'The Description' }
-
1
let( :id ) { '0123458678' }
-
4
let( :methodology ) { 'The Methodology' }
-
2
let( :methodology_new ) { 'The New Methodology' }
-
4
let( :rights_license ) { 'The Rights License' }
-
4
let( :subject_discipline ) { 'The Subject Discipline' }
-
4
let( :title ) { 'The Title' }
-
3
let( :visibility_private ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
-
4
let( :visibility_public ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
-
1
let( :curation_concern ) do
-
3
DataSet.new( authoremail: authoremail,
-
title: [title],
-
creator: [creator],
-
date_created: date_created,
-
depositor: depositor,
-
description: [description],
-
methodology: methodology,
-
rights_license: rights_license,
-
subject_discipline: [subject_discipline],
-
visibility: visibility_public )
-
end
-
-
1
context 'No changes' do
-
1
let( :form_params ) do
-
1
{ "title": [title, ""],
-
"creator": [creator, ""],
-
"authoremail": authoremail,
-
"methodology": methodology,
-
"description": [description, ""],
-
"rights_license": rights_license,
-
"subject_discipline": [subject_discipline, ""],
-
"fundedby": "",
-
"fundedby_other": "",
-
"grantnumber": "",
-
"keyword": [""],
-
"language": [""],
-
"referenced_by": [""],
-
"member_of_collection_ids": "",
-
"find_child_work": "",
-
"permissions_attributes": { "0": { "access": "edit", "id": "197055dd-3e5e-4714-9878-8620f2195428/39/2e/47/ca/392e47ca-b01b-4c3f-afb9-9ddb537fdacc" } },
-
"visibility_during_embargo": "restricted",
-
"embargo_release_date": "2018-06-30",
-
"visibility_after_embargo": "restricted",
-
"visibility_during_lease": "open",
-
"lease_expiration_date": "2018-06-30",
-
"visibility_after_lease": "open",
-
"visibility": visibility_private,
-
"version": "W/\"591319c1fdd3c69832f55e8fbbef903a4a0381a5\"",
-
"date_coverage": "" }
-
end
-
2
let( :expected_attr_key_values ) { {:UpdateAttribute_visibility=>{:attribute=>:visibility, :new_value=>"restricted", :old_value=>"open"}} }
-
-
1
it do
-
1
attr_key_values = Deepblue::ProvenanceHelper.form_params_to_update_attribute_key_values( curation_concern: curation_concern,
-
form_params: form_params )
-
1
expect( attr_key_values ).to eq expected_attr_key_values
-
end
-
end
-
-
1
context 'Only visibility change.' do
-
1
let( :form_params ) do
-
1
{ "title": [title, ""],
-
"creator": [creator, ""],
-
"authoremail": authoremail,
-
"methodology": methodology,
-
"description": [description, ""],
-
"rights_license": rights_license,
-
"subject_discipline": [subject_discipline, ""],
-
"fundedby": "",
-
"fundedby_other": "",
-
"grantnumber": "",
-
"keyword": [""],
-
"language": [""],
-
"referenced_by": [""],
-
"member_of_collection_ids": "",
-
"find_child_work": "",
-
"permissions_attributes": { "0": { "access": "edit", "id": "197055dd-3e5e-4714-9878-8620f2195428/39/2e/47/ca/392e47ca-b01b-4c3f-afb9-9ddb537fdacc" } },
-
"visibility_during_embargo": "restricted",
-
"embargo_release_date": "2018-06-30",
-
"visibility_after_embargo": "open",
-
"visibility_during_lease": "open",
-
"lease_expiration_date": "2018-06-30",
-
"visibility_after_lease": "restricted",
-
"visibility": visibility_private,
-
"version": "W/\"591319c1fdd3c69832f55e8fbbef903a4a0381a5\"",
-
"date_coverage": "" }
-
end
-
2
let( :expected_attr_key_values ) { {:UpdateAttribute_visibility=>{:attribute=>:visibility, :new_value=>"restricted", :old_value=>"open"}} }
-
-
1
it do
-
1
attr_key_values = Deepblue::ProvenanceHelper.form_params_to_update_attribute_key_values( curation_concern: curation_concern,
-
form_params: form_params )
-
1
expect( attr_key_values ).to eq expected_attr_key_values
-
end
-
end
-
-
1
context 'methodology updated' do
-
1
let( :form_params ) do
-
1
{ "title": [title, ""],
-
"creator": [creator, ""],
-
"authoremail": authoremail,
-
"methodology": methodology_new,
-
"description": [description, ""],
-
"rights_license": rights_license,
-
"subject_discipline": [subject_discipline, ""],
-
"fundedby": "",
-
"fundedby_other": "",
-
"grantnumber": "",
-
"keyword": [""],
-
"language": [""],
-
"referenced_by": [""],
-
"member_of_collection_ids": "",
-
"find_child_work": "",
-
"permissions_attributes": { "0": { "access": "edit", "id": "197055dd-3e5e-4714-9878-8620f2195428/39/2e/47/ca/392e47ca-b01b-4c3f-afb9-9ddb537fdacc" } },
-
"visibility_during_embargo": "restricted",
-
"embargo_release_date": "2018-06-30",
-
"visibility_after_embargo": "open",
-
"visibility_during_lease": "open",
-
"lease_expiration_date": "2018-06-30",
-
"visibility_after_lease": "open",
-
"visibility": visibility_public,
-
"version": "W/\"591319c1fdd3c69832f55e8fbbef903a4a0381a5\"",
-
"date_coverage": "" }
-
end
-
2
let( :expected_attr_key_values ) { { UpdateAttribute_methodology: { attribute: :methodology, old_value: methodology, new_value: methodology_new } } }
-
-
1
it do
-
1
attr_key_values = Deepblue::ProvenanceHelper.form_params_to_update_attribute_key_values( curation_concern: curation_concern,
-
form_params: form_params )
-
1
expect( attr_key_values ).to eq expected_attr_key_values
-
end
-
end
-
-
end
-
-
1
describe '.initialize_prov_key_values' do
-
4
let( :event_note ) { 'the_event_note' }
-
6
let( :user_email ) { 'user@email.com' }
-
-
1
context 'parameters: user_email and event_note' do
-
1
subject do
-
3
lambda do |user_email, event_note|
-
3
Deepblue::ProvenanceHelper.logger_initialize_key_values(user_email: user_email, event_note: event_note )
-
end
-
end
-
-
2
let( :result_both ) { { user_email: user_email, event_note: event_note } }
-
3
let( :result_no_event_note ) { { user_email: user_email } }
-
-
2
it { expect( subject.call( user_email, event_note ) ).to eq result_both }
-
2
it { expect( subject.call( user_email, '' ) ).to eq result_no_event_note }
-
2
it { expect( subject.call( user_email, nil ) ).to eq result_no_event_note }
-
end
-
-
1
context 'parameters: user_email, event_note and added' do
-
3
let( :added1 ) { 'one' }
-
2
let( :added2 ) { 'two' }
-
-
2
let( :result1 ) { { user_email: user_email, event_note: event_note, added1: added1 } }
-
2
let( :result2 ) { { user_email: user_email, event_note: event_note, added1: added1, added2: added2 } }
-
-
1
it 'returns a hash containing user_email, event_note, and added1' do
-
1
expect( Deepblue::ProvenanceHelper.logger_initialize_key_values(user_email: user_email,
-
event_note: event_note,
-
added1: added1 ) ).to eq result1
-
end
-
-
1
it 'returns a hash containing user_email, event_note, added1, and added2' do
-
1
expect( Deepblue::ProvenanceHelper.logger_initialize_key_values(user_email: user_email,
-
event_note: event_note,
-
added1: added1,
-
added2: added2 ) ).to eq result2
-
end
-
end
-
end
-
-
1
describe '.msg_to_log' do
-
3
let( :class_name ) { 'DataSet' }
-
3
let( :event ) { 'the_event' }
-
2
let( :event_note ) { 'the_event_note' }
-
2
let( :blank_event_note ) { '' }
-
3
let( :id ) { 'id1234' }
-
3
let( :timestamp ) { Time.now.to_formatted_s(:db ) }
-
3
let( :time_zone ) { DateTime.now.zone }
-
-
1
context 'parms without added' do
-
2
let( :key_values ) { { event: event,
-
event_note: event_note,
-
timestamp: timestamp,
-
time_zone: time_zone,
-
class_name: class_name,
-
id: id } }
-
2
let( :json ) { ActiveSupport::JSON.encode key_values }
-
2
let( :result1 ) { "#{timestamp} #{event}/#{event_note}/#{class_name}/#{id} #{json}" }
-
1
it do
-
1
expect( Deepblue::ProvenanceHelper.msg_to_log( class_name: class_name,
-
event: event,
-
event_note: event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone ) ).to eq result1
-
end
-
end
-
-
1
context 'parms, blank event_note, without added' do
-
2
let( :key_values ) { { event: event, timestamp: timestamp, time_zone: time_zone, class_name: class_name, id: id } }
-
2
let( :json ) { ActiveSupport::JSON.encode key_values }
-
2
let( :result1 ) { "#{timestamp} #{event}//#{class_name}/#{id} #{json}" }
-
1
it do
-
1
expect( Deepblue::ProvenanceHelper.msg_to_log( class_name: class_name,
-
event: event,
-
event_note: blank_event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone ) ).to eq result1
-
end
-
end
-
end
-
-
1
describe '.log' do
-
2
let( :added1 ) { 'one' }
-
2
let( :added2 ) { 'two' }
-
3
let( :class_name ) { 'DataSet' }
-
2
let( :class_name_default ) { 'UnknownClass' }
-
3
let( :event ) { 'the_event' }
-
2
let( :event_default ) { 'unknown' }
-
3
let( :event_note ) { 'the_event_note' }
-
2
let( :event_note_default ) { '' }
-
1
let( :blank_event_note ) { '' }
-
3
let( :id ) { 'id1234' }
-
2
let( :id_default ) { 'unknown_id' }
-
1
let( :timestamp ) { Time.now.to_formatted_s(:db ) }
-
2
let( :time_zone ) { DateTime.now.zone }
-
-
1
context 'no parms' do
-
1
it do
-
1
prov_logger_received = nil
-
2
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
before = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
Deepblue::ProvenanceHelper.log
-
1
after = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
expect( prov_logger_received ).to be_a String
-
rv_timestamp,
-
rv_event,
-
rv_event_note,
-
rv_class_name,
-
rv_id,
-
1
rv_key_values = Deepblue::ProvenanceHelper.parse_log_line prov_logger_received
-
1
expect( rv_timestamp ).to be_between( before, after )
-
1
expect( rv_event ).to eq event_default
-
1
expect( rv_event_note ).to eq event_note_default
-
1
expect( rv_class_name ).to eq class_name_default
-
1
expect( rv_id ).to eq id_default
-
1
expect( rv_key_values['timestamp'] ).to be_between( before, after )
-
1
expect( rv_key_values['time_zone'] ).to eq time_zone
-
1
expect( rv_key_values['event'] ).to eq event_default
-
# expect( rv_key_values['event_note'] ).to eq event_note_default
-
1
expect( rv_key_values['class_name'] ).to eq class_name_default
-
1
expect( rv_key_values['id'] ).to eq id_default
-
1
expect( rv_key_values.size ).to eq 5
-
end
-
end
-
-
1
context 'parms specified, no added' do
-
2
let( :timestamp ) { Deepblue::ProvenanceHelper.to_log_format_timestamp( 5.minutes.ago ) }
-
2
let( :time_zone ) { DateTime.now.zone }
-
1
it do
-
1
prov_logger_received = nil
-
2
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
Deepblue::ProvenanceHelper.log( class_name: class_name,
-
event: event,
-
event_note: event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone )
-
1
expect( prov_logger_received ).to be_a String
-
rv_timestamp,
-
rv_event,
-
rv_event_note,
-
rv_class_name,
-
rv_id,
-
1
rv_key_values = Deepblue::ProvenanceHelper.parse_log_line prov_logger_received
-
1
expect( rv_timestamp ).to eq timestamp
-
1
expect( rv_event ).to eq event
-
1
expect( rv_event_note ).to eq event_note
-
1
expect( rv_class_name ).to eq class_name
-
1
expect( rv_id ).to eq id
-
1
expect( rv_key_values['timestamp'] ).to eq timestamp
-
1
expect( rv_key_values['time_zone'] ).to eq time_zone
-
1
expect( rv_key_values['event'] ).to eq event
-
1
expect( rv_key_values['event_note'] ).to eq event_note
-
1
expect( rv_key_values['class_name'] ).to eq class_name
-
1
expect( rv_key_values['id'] ).to eq id
-
1
expect( rv_key_values.size ).to eq 6
-
end
-
end
-
-
1
context 'parms specified and added' do
-
2
let( :timestamp ) { Deepblue::ProvenanceHelper.to_log_format_timestamp( 5.minutes.ago ) }
-
2
let( :time_zone ) { DateTime.now.zone }
-
1
it do
-
1
prov_logger_received = nil
-
2
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
Deepblue::ProvenanceHelper.log( class_name: class_name,
-
event: event,
-
event_note: event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone,
-
added1: added1,
-
added2: added2 )
-
1
expect( prov_logger_received ).to be_a String
-
rv_timestamp,
-
rv_event,
-
rv_event_note,
-
rv_class_name,
-
rv_id,
-
1
rv_key_values = Deepblue::ProvenanceHelper.parse_log_line prov_logger_received
-
1
expect( rv_timestamp ).to eq timestamp
-
1
expect( rv_event ).to eq event
-
1
expect( rv_event_note ).to eq event_note
-
1
expect( rv_class_name ).to eq class_name
-
1
expect( rv_id ).to eq id
-
1
expect( rv_key_values['timestamp'] ).to eq timestamp
-
1
expect( rv_key_values['time_zone'] ).to eq time_zone
-
1
expect( rv_key_values['event'] ).to eq event
-
1
expect( rv_key_values['event_note'] ).to eq event_note
-
1
expect( rv_key_values['class_name'] ).to eq class_name
-
1
expect( rv_key_values['id'] ).to eq id
-
1
expect( rv_key_values['added1'] ).to eq added1
-
1
expect( rv_key_values['added2'] ).to eq added2
-
1
expect( rv_key_values.size ).to eq 8
-
end
-
end
-
end
-
-
1
describe '.log_raw' do
-
2
let( :msg ) { 'The message.' }
-
1
before do
-
1
allow( PROV_LOGGER ).to receive( :info ).with( msg )
-
end
-
1
it do
-
1
Deepblue::ProvenanceHelper.log_raw msg
-
end
-
end
-
-
1
describe '.parse_log_line' do
-
2
let( :added1 ) { 'one' }
-
2
let( :added2 ) { 'two' }
-
4
let( :class_name ) { 'DataSet' }
-
4
let( :event ) { 'the_event' }
-
3
let( :event_note ) { 'the_event_note' }
-
2
let( :blank_event_note ) { '' }
-
4
let( :id ) { 'id1234' }
-
4
let( :timestamp ) { Time.now.to_formatted_s(:db ) }
-
4
let( :time_zone ) { DateTime.now.zone }
-
-
1
context 'bad input raises error' do
-
1
it do
-
2
expect { Deepblue::ProvenanceHelper.parse_log_line( '' ) }.to \
-
raise_error( Deepblue::LogParseError, "parse of log line failed: ''" )
-
2
expect { Deepblue::ProvenanceHelper.parse_log_line( nil ) }.to \
-
raise_error( Deepblue::LogParseError, "parse of log line failed: ''" )
-
2
expect { Deepblue::ProvenanceHelper.parse_log_line( 'Some non-formatted line' ) }.to \
-
raise_error( Deepblue::LogParseError, "parse of log line failed: 'Some non-formatted line'" )
-
end
-
end
-
-
1
context 'parms and added parms' do
-
2
let( :before ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
2
let( :line ) { Deepblue::ProvenanceHelper.msg_to_log( class_name: class_name,
-
event: event,
-
event_note: event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone,
-
added1: added1,
-
added2: added2 ) }
-
2
let( :after ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
1
it do
-
rv_timestamp,
-
rv_event,
-
rv_event_note,
-
rv_class_name,
-
rv_id,
-
1
rv_key_values = Deepblue::ProvenanceHelper.parse_log_line line
-
1
expect( rv_timestamp ).to be_between( before, after )
-
1
expect( rv_event ).to eq event
-
1
expect( rv_event_note ).to eq event_note
-
1
expect( rv_class_name ).to eq class_name
-
1
expect( rv_id ).to eq id
-
1
expect( rv_key_values['timestamp'] ).to be_between( before, after )
-
1
expect( rv_key_values['time_zone'] ).to eq time_zone
-
1
expect( rv_key_values['event'] ).to eq event
-
1
expect( rv_key_values['event_note'] ).to eq event_note
-
1
expect( rv_key_values['class_name'] ).to eq class_name
-
1
expect( rv_key_values['id'] ).to eq id
-
1
expect( rv_key_values['added1'] ).to eq added1
-
1
expect( rv_key_values['added2'] ).to eq added2
-
1
expect( rv_key_values.size ).to eq 8
-
end
-
end
-
-
1
context 'parms without added' do
-
2
let( :before ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
2
let( :line ) { Deepblue::ProvenanceHelper.msg_to_log( class_name: class_name,
-
event: event,
-
event_note: event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone ) }
-
2
let( :after ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
1
it do
-
rv_timestamp,
-
rv_event,
-
rv_event_note,
-
rv_class_name,
-
rv_id,
-
1
rv_key_values = Deepblue::ProvenanceHelper.parse_log_line line
-
1
expect( rv_timestamp ).to be_between( before, after )
-
1
expect( rv_event ).to eq event
-
1
expect( rv_event_note ).to eq event_note
-
1
expect( rv_class_name ).to eq class_name
-
1
expect( rv_id ).to eq id
-
1
expect( rv_key_values['timestamp'] ).to be_between( before, after )
-
1
expect( rv_key_values['event'] ).to eq event
-
1
expect( rv_key_values['event_note'] ).to eq event_note
-
1
expect( rv_key_values['class_name'] ).to eq class_name
-
1
expect( rv_key_values['id'] ).to eq id
-
1
expect( rv_key_values.size ).to eq 6
-
end
-
end
-
-
1
context 'parms without added and blank event note' do
-
2
let( :before ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
2
let( :line ) { Deepblue::ProvenanceHelper.msg_to_log( class_name: class_name,
-
event: event,
-
event_note: blank_event_note,
-
id: id,
-
timestamp: timestamp,
-
time_zone: time_zone ) }
-
2
let( :after ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
1
it do
-
rv_timestamp,
-
rv_event,
-
rv_event_note,
-
rv_class_name,
-
rv_id,
-
1
rv_key_values = Deepblue::ProvenanceHelper.parse_log_line line
-
1
expect( rv_timestamp ).to be_between( before, after )
-
1
expect( rv_event ).to eq event
-
1
expect( rv_event_note ).to eq blank_event_note
-
1
expect( rv_class_name ).to eq class_name
-
1
expect( rv_id ).to eq id
-
1
expect( rv_key_values['timestamp'] ).to be_between( before, after )
-
1
expect( rv_key_values['time_zone'] ).to eq time_zone
-
1
expect( rv_key_values['event'] ).to eq event
-
# expect( rv_key_values['event_note'] ).to eq event_note
-
1
expect( rv_key_values['class_name'] ).to eq class_name
-
1
expect( rv_key_values['id'] ).to eq id
-
1
expect( rv_key_values.size ).to eq 5
-
end
-
end
-
end
-
-
1
describe '.system_as_current_user' do
-
2
subject { Deepblue::ProvenanceHelper.system_as_current_user }
-
2
it { expect( subject ).to eq 'Deepblue' }
-
end
-
-
1
describe '.timestamp_now' do
-
1
context 'is formatted correctly' do
-
1
it do
-
1
expect( Deepblue::ProvenanceHelper.timestamp_now ).to match '^\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d$'
-
end
-
end
-
1
context 'is now' do
-
2
let( :before ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
2
let( :timestamp_now ) { Deepblue::ProvenanceHelper.timestamp_now }
-
2
let( :after ) { Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now }
-
1
it do
-
1
expect( timestamp_now ).to be_between( before, after )
-
end
-
end
-
end
-
-
1
describe '.to_log_format_timestamp' do
-
4
let( :time_now ) { Time.now }
-
2
let( :timestamp_now ) { time_now.to_formatted_s( :db ) }
-
4
let( :timestamp_re ) { '^\d\d\d\d\-\d\d\-\d\d \d\d:\d\d:\d\d$' }
-
1
context 'for correctly formatted string' do
-
1
it do
-
1
expect( Deepblue::ProvenanceHelper.to_log_format_timestamp( timestamp_now ) ).to match timestamp_re
-
end
-
end
-
1
context 'for correctly Time' do
-
1
it do
-
1
expect( Deepblue::ProvenanceHelper.to_log_format_timestamp( time_now ) ).to match timestamp_re
-
end
-
end
-
1
context 'for correctly different format string' do
-
1
it do
-
1
expect( Deepblue::ProvenanceHelper.to_log_format_timestamp( time_now.to_s ) ).to match timestamp_re
-
end
-
end
-
end
-
-
1
describe '.update_attribute_key_values' do
-
2
let( :authoremail ) { 'authoremail@umich.edu' }
-
2
let( :creator ) { [ 'Creator, A' ] }
-
1
let( :current_user ) { 'user@umich.edu' }
-
2
let( :date_created ) { '2018-02-28' }
-
2
let( :depositor ) { authoremail }
-
2
let( :description ) { [ 'The Description' ] }
-
1
let( :id ) { '0123458678' }
-
2
let( :methodology_new ) { 'The New Methodology' }
-
2
let( :methodology_old ) { 'The Old Methodology' }
-
2
let( :rights_license ) { 'The Rights License' }
-
2
let( :title ) { [ 'The Title' ] }
-
2
let( :subject_discipline ) { 'The Subject Discipline' }
-
1
let( :visibility_private ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
-
2
let( :visibility_public ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
-
1
let( :curation_concern ) do
-
1
DataSet.new( authoremail: authoremail,
-
title: title,
-
creator: creator,
-
date_created: date_created,
-
depositor: depositor,
-
description: description,
-
methodology: methodology_new,
-
rights_license: rights_license,
-
subject_discipline: [subject_discipline],
-
visibility: visibility_public )
-
end
-
2
let( :update_attr_key_values ) { { UpdateAttribute_methodology: { attribute: :methodology, old_value: methodology_old, new_value: 'some value from form' } } }
-
1
it do
-
1
updated = Deepblue::ProvenanceHelper.update_attribute_key_values( curation_concern: curation_concern,
-
update_attr_key_values: update_attr_key_values )
-
# puts ActiveSupport::JSON.encode updated
-
# updated = updated[:update_attr_key_values]
-
1
expect( updated.size ).to be 1
-
1
expect( updated.key?(:UpdateAttribute_methodology) ).to be true
-
1
expect( updated[:UpdateAttribute_methodology][:attribute] ).to eq :methodology
-
1
expect( updated[:UpdateAttribute_methodology][:old_value] ).to eq methodology_old
-
1
expect( updated[:UpdateAttribute_methodology][:new_value] ).to eq methodology_new
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require 'umich_clamav_daemon_scanner'
-
-
1
RSpec.describe UMichClamAVDaemonScanner do
-
-
1
let(:clamd_running) { !!/\S/.match(`pgrep clamd`) }
-
-
4
let(:basic_av_connection) { described_class.new(__FILE__) }
-
-
1
it "can scan a harmless file" do
-
1
skip("ClamAV Daemon not running") unless basic_av_connection.alive?
-
scanner = described_class.new(__FILE__)
-
expect(scanner.scan_response).to be_a(ClamAV::SuccessResponse)
-
end
-
-
1
it "reports an error on a bad file (directory in this case)" do
-
1
skip("ClamAV Daemon not running") unless basic_av_connection.alive?
-
scanner = described_class.new(__dir__)
-
expect(scanner.scan_response).to be_a(ClamAV::ErrorResponse)
-
end
-
-
1
it "errors out if the file doesn't exist" do
-
1
skip("ClamAV Daemon not running") unless basic_av_connection.alive?
-
scanner = described_class.new('asdfadsfasdfasdfasdf')
-
expect { scanner.infected? }.to raise_error(RuntimeError, /Can't open file/)
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
class MockJob
-
1
def run( )
-
end
-
-
end
-
-
-
1
RSpec.describe Hyrax::ApplicationJob::AboutToExpireEmbargoesJob do
-
-
1
describe "#perform" do
-
1
jobObject = MockJob.new
-
-
1
before {
-
3
allow(Deepblue::LoggingHelper).to receive(:here).and_return "here"
-
3
allow(Deepblue::LoggingHelper).to receive(:called_from).and_return "called from"
-
3
allow(Deepblue::LoggingHelper).to receive(:obj_class).with('class', an_instance_of(AboutToExpireEmbargoesJob)).and_return "bundt"
-
3
allow(Deepblue::LoggingHelper).to receive(:obj_class).with( 'args', ["cake"] ).and_return "frosting"
-
-
3
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with([ "here", "called from", "bundt", "args=[\"cake\"]", "frosting", "" ])
-
-
3
allow(Deepblue::SchedulerHelper).to receive(:log).with( class_name: "AboutToExpireEmbargoesJob", event: "about_to_expire_embargoes" )
-
-
3
allow(Deepblue::LoggingHelper).to receive(:obj_class).with('options', {"cake"=>nil}).and_return "frosting"
-
3
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with([[ "here", "options={\"cake\"=>nil}", "frosting", "" ]])
-
-
3
allow(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'email_owner', default_value: true} ).and_return "email_owner"
-
3
allow(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'expiration_lead_days'} ).and_return "lead_days"
-
3
allow(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'skip_file_sets', default_value: true} ).and_return "skipping"
-
3
allow(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'test_mode', default_value: false} ).and_return "la mode"
-
}
-
-
1
context "when verbose" do
-
1
before {
-
1
allow(subject).to receive(:job_options_value).with({"cake"=>nil}, {:key=>"verbose", :default_value=>false} ).and_return true
-
1
allow(Deepblue::LoggingHelper).to receive(:debug).with( "verbose=true" )
-
-
1
allow(Deepblue::LoggingHelper).to receive(:debug).with( "email_owner=email_owner" )
-
1
allow(Deepblue::LoggingHelper).to receive(:debug).with( "expiration_lead_days=lead_days" )
-
1
allow(Deepblue::LoggingHelper).to receive(:debug).with( "@skip_file_sets=skipping")
-
1
allow(Deepblue::LoggingHelper).to receive(:debug).with( "test_mode=la mode")
-
-
1
allow(Deepblue::AboutToExpireEmbargoesService).to receive(:new).with( email_owner: "email_owner",
-
expiration_lead_days: "lead_days",
-
skip_file_sets: "skipping",
-
test_mode: "la mode",
-
verbose: true ).and_return(jobObject)
-
1
allow(jobObject).to receive(:run)
-
}
-
-
1
it "calls Deepblue helper methods and logging debug method" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with([ "here", "called from", "bundt", "args=[\"cake\"]", "frosting", "" ])
-
-
1
expect(Deepblue::SchedulerHelper).to receive(:log).with( class_name: "AboutToExpireEmbargoesJob", event: "about_to_expire_embargoes" )
-
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with([ "here", "options={\"cake\"=>nil}", "frosting", "" ])
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {:key=>"verbose", :default_value=>false} ).and_return true
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with( "verbose=true" )
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'email_owner', default_value: true} ).and_return "email_owner"
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with( "email_owner=email_owner" )
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'expiration_lead_days'} ).and_return "lead_days"
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with( "expiration_lead_days=lead_days" )
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'skip_file_sets', default_value: true} ).and_return "skipping"
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with( "@skip_file_sets=skipping")
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'test_mode', default_value: false} ).and_return "la mode"
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with( "test_mode=la mode")
-
-
1
expect(Deepblue::AboutToExpireEmbargoesService).to receive(:new).with( email_owner: "email_owner",
-
expiration_lead_days: "lead_days",
-
skip_file_sets: "skipping",
-
test_mode: "la mode",
-
verbose: true ).and_return(jobObject)
-
1
subject.perform "cake"
-
end
-
end
-
-
1
context "when not verbose" do
-
1
before {
-
1
allow(subject).to receive(:job_options_value).with({"cake"=>nil}, {:key=>"verbose", :default_value=>false} ).and_return false
-
1
allow(Deepblue::LoggingHelper).to receive(:debug).with( "verbose=false" )
-
-
1
allow(Deepblue::AboutToExpireEmbargoesService).to receive(:new).with( email_owner: "email_owner",
-
expiration_lead_days: "lead_days",
-
skip_file_sets: "skipping",
-
test_mode: "la mode",
-
verbose: false ).and_return(jobObject)
-
}
-
-
1
it "calls Deepblue helper methods" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with([ "here", "called from", "bundt", "args=[\"cake\"]", "frosting", "" ])
-
1
expect(Deepblue::SchedulerHelper).to receive(:log).with( class_name: "AboutToExpireEmbargoesJob", event: "about_to_expire_embargoes" )
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with([ "here", "options={\"cake\"=>nil}", "frosting", "" ])
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {:key=>"verbose", :default_value=>false} ).and_return false
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'email_owner', default_value: true} ).and_return "email_owner"
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'expiration_lead_days'} ).and_return "lead_days"
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'skip_file_sets', default_value: true} ).and_return "skipping"
-
-
1
expect(subject).to receive(:job_options_value).with({"cake"=>nil}, {key: 'test_mode', default_value: false} ).and_return "la mode"
-
-
1
expect(Deepblue::AboutToExpireEmbargoesService).to receive(:new).with( email_owner: "email_owner",
-
expiration_lead_days: "lead_days",
-
skip_file_sets: "skipping",
-
test_mode: "la mode",
-
verbose: false ).and_return(jobObject)
-
1
subject.perform "cake"
-
end
-
end
-
-
1
context "when exception occurs" do
-
1
it "catches exceptions" do
-
1
skip "Add a test"
-
end
-
end
-
end
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
1
require 'uri'
-
1
require_relative '../../app/mailers/deepblue_mailer'
-
-
1
RSpec.configure do |config|
-
1
config.filter_run_excluding globus_enabled: :true unless DeepBlueDocs::Application.config.globus_enabled
-
end
-
-
1
class MailerMock
-
1
def deliver_now; end
-
end
-
-
# TODO: re-enable globus specs?
-
1
describe GlobusCopyJob, "GlobusJob globus_enabled: :true", globus_enabled: :true, skip: true do # rubocop:disable RSpec/DescribeMethod
-
-
1
let( :globus_dir ) { Pathname "/tmp/deepbluedata-globus" }
-
1
let( :globus_download_dir ) { globus_dir.join 'download' }
-
1
let( :target_name ) { "DeepBlueData_id321" }
-
1
let( :target_name_prep_dir ) { "#{GlobusJob.server_prefix(str: '_')}#{target_name}" }
-
1
let( :globus_prep_dir ) { globus_dir.join 'prep' }
-
1
let( :error_file ) { globus_prep_dir.join ".test.error.#{target_name}" }
-
1
let( :job_ready_file ) { globus_prep_dir.join ".test.ready.#{target_name}" }
-
1
let( :lock_file ) { globus_prep_dir.join ".test.lock.#{target_name}" }
-
1
let( :email_file ) { globus_prep_dir.join ".test.copy_job_emails.#{target_name}" }
-
-
1
describe "#perform" do
-
1
let( :user ) { FactoryBot.build(:user) }
-
1
let( :work ) { FactoryBot.build(:data_set, id: 'id321', title: ['test title'], user: user) }
-
1
let( :globus_download_ready_dir ) { globus_download_dir.join target_name }
-
1
let( :globus_download_ready_file1 ) { globus_download_ready_dir.join 'File01' }
-
1
let( :globus_download_ready_file2 ) { globus_download_ready_dir.join 'File02' }
-
1
let( :globus_download_ready_file_metadata ) { globus_download_ready_dir.join 'w_id321_metadata_report.txt' }
-
1
let( :file_set1 ) { FactoryBot.build(:file_set, label: 'File01', id: 'fs0001') }
-
1
let( :file_set2 ) { FactoryBot.build(:file_set, label: 'File02', id: 'fs0002') }
-
1
let( :file1 ) { Tempfile.new( "File01-" ) }
-
1
let( :file2 ) { Tempfile.new( "File02-" ) }
-
1
let( :uri1 ) { URI.join('file:///', file1.path.to_s ) }
-
1
let( :uri2 ) { URI.join('file:///', file2.path.to_s ) }
-
1
let( :ready_file ) { job_ready_file }
-
1
let( :log_prefix ) { "Globus: globus_copy_job" }
-
1
let( :lock_file_msg ) { "#{log_prefix} lock file #{lock_file}" }
-
1
let( :globus_prep_copy_dir ) { globus_prep_dir.join target_name_prep_dir }
-
1
let( :globus_prep_copy_tmp_dir ) { globus_prep_dir.join( target_name_prep_dir + '_tmp' ) }
-
1
let( :current_token ) { GlobusJob.era_token }
-
1
let( :user_email ) { "test@email.edu" }
-
1
let( :email_addresses ) { [ user_email ] }
-
1
let( :mailer ) { MailerMock.new }
-
-
1
context "when can acquire lock" do
-
1
before do
-
allow( ActiveFedora::Base ).to receive( :find ).and_return( work )
-
file_set1.define_singleton_method( :files ) do nil; end
-
file_set2.define_singleton_method( :files ) do nil; end
-
file1.define_singleton_method( :uri ) do nil; end
-
file2.define_singleton_method( :uri ) do nil; end
-
file1.define_singleton_method( :original_name ) do 'File01' end
-
file2.define_singleton_method( :original_name ) do 'File02' end
-
uri1.define_singleton_method( :value ) do nil; end
-
uri2.define_singleton_method( :value ) do nil; end
-
allow( file_set1 ).to receive( :files ).and_return( [file1] )
-
allow( file_set2 ).to receive( :files ).and_return( [file2] )
-
allow( file1 ).to receive( :uri ).and_return( uri1 )
-
allow( file2 ).to receive( :uri ).and_return( uri2 )
-
allow( uri1 ).to receive( :value ).and_return( file1.path )
-
allow( uri2 ).to receive( :value ).and_return( file2.path )
-
allow( work ).to receive( :file_sets ).and_return( [file_set1, file_set2] )
-
File.delete error_file if File.exist? error_file
-
File.delete lock_file if File.exist? lock_file
-
# Dir.delete globus_prep_copy_dir if Dir.exist? globus_prep_copy_dir
-
# Dir.delete globus_prep_copy_tmp_dir if Dir.exist? globus_prep_copy_tmp_dir
-
allow( Rails.logger ).to receive( :debug )
-
allow( Rails.logger ).to receive( :error )
-
allow( DeepblueMailer ).to receive( :send_an_email ).with( any_args ).and_return( mailer )
-
allow( mailer ).to receive( :deliver_now )
-
end
-
1
it "calls globus block." do
-
open( file1.path, 'w' ) { |f| f << "File01" << "\n" }
-
open( file2.path, 'w' ) { |f| f << "File02" << "\n" }
-
described_class.perform_now( "id321", user_email: user_email )
-
# expect( Rails.logger ).to have_received( :debug ).with( 'bogus so we can look at the logger output' )
-
file = '/tmp/deepbluedata-globus/prep/.test.error.DeepBlueData_id321'
-
if File.exist? file
-
puts ">>>>>>>>>>>>>>>>>>"
-
puts "Error file exists:"
-
open( file, 'r') { |f| puts f.readlines.join( "\n" ) }
-
puts ">>>>>>>>>>>>>>>>>>"
-
end
-
expect( Rails.logger ).to have_received( :debug ).with( "#{log_prefix} lock file #{lock_file}" )
-
expect( Rails.logger ).to have_received( :debug ).with( "#{log_prefix} writing lock token #{current_token} to #{lock_file}" )
-
expect( Rails.logger ).to have_received( :debug ).with( "#{log_prefix} begin copy" )
-
expect( Rails.logger ).to have_received( :debug ).with( "#{log_prefix} Starting export to #{globus_prep_copy_tmp_dir}" )
-
expect( Rails.logger ).to have_received( :debug ).with( "#{log_prefix} copy complete" )
-
# expect( Rails.logger ).to have_received( :debug ).with( 'bogus so we can look at the logger output' )
-
# expect( Rails.logger ).to have_received( :error ).with( 'bogus so we can look at the logger output' )
-
expect( Rails.logger ).not_to have_received( :error )
-
expect( File.exist?(ready_file) ).to eq( true )
-
expect( Dir.exist?(globus_download_ready_dir) ).to eq( true )
-
expect( Dir.exist?(globus_prep_copy_dir) ).to eq( false )
-
expect( Dir.exist?(globus_prep_copy_tmp_dir) ).to eq( false )
-
expect( File.exist?(globus_download_ready_file1) ).to eq( true )
-
expect( File.exist?(globus_download_ready_file2) ).to eq( true )
-
expect( File.exist?(globus_download_ready_file_metadata) ).to eq( true )
-
end
-
1
after do
-
File.delete email_file if File.exist? email_file
-
File.delete error_file if File.exist? error_file
-
File.delete lock_file if File.exist? lock_file
-
File.delete ready_file if File.exist? ready_file
-
File.delete globus_download_ready_file1 if File.exist? globus_download_ready_file1
-
File.delete globus_download_ready_file2 if File.exist? globus_download_ready_file2
-
File.delete globus_download_ready_file_metadata if File.exist? globus_download_ready_file_metadata
-
Dir.delete globus_download_ready_dir if Dir.exist? globus_download_ready_dir
-
end
-
end
-
end
-
-
1
describe "#globus_do_copy?" do
-
1
let( :job ) { described_class.new }
-
1
let( :target_file_name ) { "targetfile" }
-
1
let( :prep_file_name ) { globus_prep_dir.join target_file_name }
-
1
before do
-
prep_dir = globus_prep_dir
-
job.define_singleton_method( :set_parms ) do
-
@globus_concern_id = "id321"
-
@globus_log_prefix = "Globus: "
-
@target_prep_dir = prep_dir
-
end
-
job.set_parms
-
end
-
1
context "when prep file exists" do
-
1
before do
-
allow( File ).to receive( :exist? ).with( prep_file_name ).and_return( true )
-
msg = "Globus: skipping copy because #{prep_file_name} already exists"
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( msg )
-
end
-
1
it "returns false." do
-
expect( job.send( :globus_do_copy?, target_file_name ) ).to eq( false )
-
end
-
end
-
1
context "when prep file does not exist" do
-
1
before do
-
allow( File ).to receive( :exist? ).with( prep_file_name ).and_return( false )
-
end
-
1
it "returns true." do
-
expect( job.send( :globus_do_copy?, target_file_name ) ).to eq( true )
-
end
-
end
-
end
-
-
1
describe "#globus_job_complete_file" do
-
1
let( :job ) { described_class.new }
-
1
before do
-
job.define_singleton_method( :set_parms ) do @globus_concern_id = "id321"; end
-
job.set_parms
-
end
-
1
it "returns the ready file name." do
-
expect( job.send( :globus_job_complete_file ) ).to eq( job_ready_file )
-
end
-
end
-
-
1
describe "#globus_job_complete?" do
-
1
let( :job ) { described_class.new }
-
1
let( :job_complete_dir ) { globus_download_dir.join 'DeepBlueData_id321' }
-
1
before do
-
job.define_singleton_method( :set_parms ) do @globus_concern_id = "id321"; end
-
job.set_parms
-
end
-
1
context "when file exists" do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( job_complete_dir ).and_return( true )
-
end
-
1
it "return true." do
-
expect( job.send( :globus_job_complete? ) ).to eq( true )
-
end
-
end
-
1
context "when file does not exist" do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( job_complete_dir ).and_return( false )
-
end
-
1
it "return true." do
-
expect( job.send( :globus_job_complete? ) ).to eq( false )
-
end
-
end
-
end
-
-
# describe "#globus_notify_user" do
-
# # TODO
-
# end
-
-
1
describe "#globus_ready_file" do
-
1
let( :job ) { described_class.new }
-
1
before do
-
job.define_singleton_method( :set_parms ) do @globus_concern_id = "id321"; end
-
job.set_parms
-
end
-
1
it "returns the ready file name." do
-
expect( job.send( :globus_ready_file ) ).to eq( job_ready_file )
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
RSpec.configure do |config|
-
1
config.filter_run_excluding globus_enabled: :true unless DeepBlueDocs::Application.config.globus_enabled
-
end
-
-
# TODO: re-enable globus specs?
-
1
describe GlobusJob, "GlobusJob globus_enabled: :true", globus_enabled: :true, skip: true do # rubocop:disable RSpec/DescribeMethod
-
-
1
let( :globus_dir ) { Pathname.new "/tmp/deepbluedata-globus" }
-
1
let( :globus_download_dir ) { globus_dir.join 'download' }
-
1
let( :globus_prep_dir ) { globus_dir.join 'prep' }
-
1
let( :globus_target_download_dir ) { globus_download_dir.join 'DeepBlueData_id321' }
-
1
let( :globus_target_prep_dir ) { globus_prep_dir.join "#{GlobusJob.server_prefix(str: '_')}DeepBlueData_id321" }
-
1
let( :globus_target_prep_tmp_dir ) { globus_prep_dir.join "#{GlobusJob.server_prefix(str: '_')}DeepBlueData_id321" }
-
1
let( :error_file ) { globus_prep_dir.join '.test.error.DeepBlueData_id321' }
-
1
let( :lock_file ) { globus_prep_dir.join '.test.lock.DeepBlueData_id321' }
-
-
1
describe "GlobusJob#copy_complete?" do
-
1
context "directory exists in download dir" do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( globus_target_download_dir ).and_return( true )
-
end
-
1
it "returns true." do expect( GlobusJob.copy_complete?( "id321" ) ).to eq( true ); end
-
end
-
end
-
-
1
describe "GlobusJob#external_url" do
-
1
it "returns a globus external url." do
-
url = GlobusJob.external_url "id321"
-
expect( url ).to eq( "https://app.globus.org/file-manager?origin_id=99d8c648-a9ff-11e7-aedd-22000a92523b&origin_path=%2Fdownload%2FDeepBlueData_id321%2F" )
-
end
-
end
-
-
1
describe "GlobusJob#files_prepping?" do
-
1
context "directory exists in prep dir" do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( globus_target_download_dir ).and_return( false )
-
allow( File ).to receive( :exist? ).with( error_file ).and_return( false )
-
allow( GlobusJob ).to receive( :locked? ).with( "id321" ).and_return( true )
-
end
-
1
it "returns true." do expect( GlobusJob.files_prepping?( "id321" ) ).to eq( true ); end
-
end
-
end
-
-
1
describe "GlobusJob#locked?" do
-
1
context "lock file does not exist" do
-
1
before do
-
allow( File ).to receive( :exist? ).with( error_file ).and_return( false )
-
allow( File ).to receive( :exist? ).with( lock_file ).and_return( false )
-
end
-
1
it "returns true." do expect( GlobusJob.locked?( "id321" ) ).to eq( false ); end
-
end
-
## see context "#globus_locked?" for more tests
-
end
-
-
1
describe "GlobusJob#files_target_file_name" do
-
1
it "returns target file name." do
-
url = GlobusJob.files_target_file_name "id321"
-
expect( url ).to eq( "DeepBlueData_id321" )
-
end
-
end
-
-
1
describe "GlobusJob#globus_lock_file" do
-
1
it "returns the lock file name." do
-
expect( GlobusJob.lock_file("id321" ) ).to eq(lock_file )
-
end
-
end
-
-
1
describe "GlobusJob#target_base_name" do
-
1
it "returns a target base name." do
-
expect( GlobusJob.target_base_name( "id321" ) ).to eq( "DeepBlueData_id321" )
-
end
-
end
-
-
1
describe "GlobusJob#target_file_name" do
-
1
it "returns a target base name." do
-
expect( GlobusJob.target_file_name( Pathname.new( 'aDir' ), "aFile" ) ).to eq( Pathname.new( 'aDir' ).join( 'aFile' ) )
-
end
-
end
-
-
1
describe "GlobusJob#target_file_name_env" do
-
1
let( :file ) { Pathname.new( 'aDir' ).join( '.test.atype.basename' ) }
-
1
it "returns a target base name." do
-
expect( GlobusJob.target_file_name_env( Pathname.new( 'aDir' ), "atype", "basename" ) ).to eq( file )
-
end
-
end
-
-
1
describe "#globus_acquire_lock?" do
-
1
let( :job ) do
-
j = described_class.new
-
j.perform( "id321" )
-
j
-
end
-
1
context "when globus is locked" do
-
1
before do
-
allow( job ).to receive( :globus_locked? ).and_return( true )
-
end
-
1
it "returns false." do
-
expect( job.send( :globus_acquire_lock? ) ).to eq( false )
-
end
-
end
-
1
context "when globus is not locked" do
-
1
before do
-
allow( job ).to receive( :globus_locked? ).and_return( false )
-
allow( job ).to receive( :globus_lock ).and_return( true )
-
end
-
1
it "returns true." do
-
expect( job.send( :globus_acquire_lock? ) ).to eq( true )
-
end
-
end
-
end
-
-
1
describe "#globus_copy_job_complete?" do
-
1
let( :job ) do
-
j = described_class.new
-
j.perform( "abc" )
-
j
-
end
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( globus_target_download_dir ).and_return( true )
-
end
-
1
it "returns true." do expect( job.send( :globus_copy_job_complete?, "id321" ) ).to eq( true ); end
-
end
-
-
1
describe "#globus_error_file" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
it "returns the error file name." do
-
expect( job.send( :globus_error_file ) ).to eq( error_file )
-
end
-
end
-
-
1
describe "#globus_error" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :error_file_tmp ) { Tempfile.new( ".test.error.DeepBlueData_id321", globus_dir ) }
-
1
let( :error_msg ) { "An error message." }
-
1
before do
-
allow( job ).to receive( :globus_error_file ).and_return( error_file_tmp.path )
-
open( error_file_tmp.path, 'w' ) { |f| f << error_msg << "\n" }
-
msg = "Globus: writing error message to #{error_file_tmp.path}"
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( msg )
-
end
-
1
after do
-
error_file_tmp.delete
-
end
-
1
it "writes out the error" do
-
expect( job.send( :globus_error, error_msg ) ).to eq( error_file_tmp.path )
-
file_contents = nil
-
open( error_file_tmp.path, 'r' ) { |f| file_contents = f.read.chomp! }
-
expect( file_contents ).to eq( error_msg )
-
end
-
end
-
-
1
context "#globus_error_file_exists?" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
context "error file exists" do
-
1
before do
-
allow( File ).to receive( :exist? ).with( error_file ).and_return( true )
-
end
-
1
it "returns true if error file exists." do
-
expect( job.send( :globus_error_file_exists? ) ).to eq( true )
-
end
-
end
-
1
context "error file exists and write to log flag is true" do
-
1
let( :job2 ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :error_file_tmp ) { Tempfile.new( ".test.error.DeepBlueData_id321", globus_dir ) }
-
1
let( :error_msg ) { "An error message." }
-
1
before do
-
allow( job2 ).to receive( :globus_error_file ).and_return( error_file_tmp.path )
-
allow( GlobusJob ).to receive( :error_file ).and_return( error_file_tmp.path )
-
open( error_file_tmp.path, 'w' ) { |f| f << error_msg << "\n" }
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( "Globus: error file contains: #{error_msg}" )
-
end
-
1
after do
-
error_file_tmp.delete
-
end
-
1
it "writes to the log when error file exists" do
-
expect( job2.send( :globus_error_file_exists?, write_error_to_log: true ) ).to eq( true )
-
end
-
end
-
1
context "error file does not exist" do
-
1
before do
-
allow( File ).to receive( :exist? ).with( error_file ).and_return( false )
-
end
-
1
it "returns true if error file exists." do
-
expect( job.send( :globus_error_file_exists? ) ).to eq( false )
-
end
-
end
-
end
-
-
1
describe "#globus_error_reset" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
context "when error file exists." do
-
1
before do
-
allow( File ).to receive( :exist? ).with( error_file ).and_return( true )
-
allow( File ).to receive( :delete ).with( error_file )
-
end
-
1
it "return true when file exists." do
-
expect( job.send( :globus_error_reset ) ).to eq( true )
-
end
-
end
-
1
context "when error file doesn't exist." do
-
1
before do
-
allow( File ).to receive( :exist? ).with( error_file ).and_return( false )
-
end
-
1
it "return true when file doesn't exist." do
-
expect( File ).not_to receive( :delete )
-
expect( job.send( :globus_error_reset ) ).to eq( true )
-
end
-
end
-
end
-
-
1
context "#globus_job_complete" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :complete_file_tmp ) { Tempfile.new( ".test.complete.DeepBlueData_id321", globus_dir ) }
-
1
before do
-
job.define_singleton_method( :globus_job_complete_file ) do "let the expect define the return value"; end
-
allow( job ).to receive( :globus_job_complete_file ).and_return( complete_file_tmp.path )
-
# log_msg = "Globus: job complete at #{timestamp}"
-
allow( Rails.logger ).to receive( :debug )
-
end
-
1
after do
-
complete_file_tmp.delete
-
end
-
1
it "writes out the globus complete file." do
-
before = Time.now.round(0) - 1.second
-
expect( job.send( :globus_job_perform_complete ) ).to eq(complete_file_tmp.path )
-
after = Time.now.round(0) + 1.second
-
file_contents = nil
-
open( complete_file_tmp.path, 'r' ) { |f| file_contents = f.read.chomp! }
-
between = Time.parse file_contents
-
expect( between ).to be_between( before, after )
-
end
-
end
-
-
1
describe "#globus_job_perform" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :globus_block ) { -> { job.inside_block } }
-
1
let( :lock_file_msg ) { "Globus: lock file #{lock_file}" }
-
1
before do
-
job.define_singleton_method( :globus_job_complete? ) do true; end
-
job.define_singleton_method( :inside_block ) do true; end
-
end
-
-
1
context "when job complete" do
-
1
before do
-
allow( job ).to receive( :globus_job_complete? ).and_return( true )
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( "Globus: skipping already complete globus job" )
-
end
-
1
it "does not call globus block" do
-
expect( GlobusJob.class_variable_get( :@@globus_enabled ) ).to eq( true )
-
expect( job ).not_to receive( :globus_acquire_lock? )
-
expect( job ).not_to receive( :inside_block )
-
job.send( :globus_job_perform, concern_id: "id321", &globus_block )
-
end
-
end
-
1
context "when can't acquire lock" do
-
1
before do
-
allow( job ).to receive( :globus_job_complete? ).and_return( false )
-
allow( job ).to receive( :globus_acquire_lock? ).and_return( false )
-
allow( job ).to receive( :globus_job_perform_in_progress )
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( lock_file_msg )
-
end
-
1
it "does not call globus block." do
-
expect( GlobusJob.class_variable_get( :@@globus_enabled ) ).to eq( true )
-
expect( job ).not_to receive( :inside_block )
-
job.send( :globus_job_perform, concern_id: "id321", &globus_block )
-
end
-
end
-
1
context "when can acquire lock" do
-
1
before do
-
allow( job ).to receive( :globus_job_complete? ).and_return( false )
-
allow( job ).to receive( :globus_acquire_lock? ).and_return( true )
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( lock_file_msg )
-
allow( job ).to receive( :globus_error_reset )
-
allow( job ).to receive( :globus_job_perform_complete_reset )
-
allow( job ).to receive( :inside_block )
-
allow( job ).to receive( :globus_job_perform_complete )
-
allow( job ).to receive( :globus_unlock )
-
end
-
1
it "calls globus block." do
-
expect( GlobusJob.class_variable_get( :@@globus_enabled ) ).to eq( true )
-
job.send( :globus_job_perform, concern_id: "id321", &globus_block )
-
expect( job ).to have_received( :globus_unlock ).exactly( 2 ).times
-
end
-
end
-
1
context "when can acquire lock and error is thrown inside block," do
-
1
before do
-
allow( job ).to receive( :globus_job_complete? ).and_return( false )
-
allow( job ).to receive( :globus_acquire_lock? ).and_return( true )
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( lock_file_msg )
-
allow( job ).to receive( :globus_error_reset )
-
allow( job ).to receive( :globus_job_perform_complete_reset )
-
allow( job ).to receive( :inside_block ).and_raise( StandardError, "generated error" )
-
allow( Rails.logger ).to receive( :error )
-
allow( job ).to receive( :globus_error ).with( /^Globus: StandardError: generated error at/ )
-
allow( job ).to receive( :globus_unlock )
-
end
-
1
it "calls globus block." do
-
expect( GlobusJob.class_variable_get( :@@globus_enabled ) ).to eq( true )
-
expect( job ).not_to receive( :globus_job_perform_complete )
-
job.send( :globus_job_perform, concern_id: "id321", &globus_block )
-
expect( Rails.logger ).to have_received( :error ).with( /^Globus: StandardError: generated error at/ )
-
end
-
end
-
end
-
-
1
describe "#globus_job_complete_reset" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :complete_file ) { "#{globus_dir}/prep/.test.complete.DeepBlueData_id321" }
-
1
context "when complete file exists." do
-
1
before do
-
job.define_singleton_method( :globus_job_complete_file ) do "let the expect define the return value"; end
-
allow( job ).to receive( :globus_job_complete_file ).and_return( complete_file )
-
allow( File ).to receive( :exist? ).with( complete_file ).and_return( true )
-
allow( File ).to receive( :delete ).with( complete_file )
-
end
-
1
it "return true when file exists." do
-
expect( job.send( :globus_job_perform_complete_reset ) ).to eq(true )
-
end
-
end
-
1
context "when complete file doesn't exist." do
-
1
before do
-
job.define_singleton_method( :globus_job_complete_file ) do "let the expect define the return value"; end
-
allow( job ).to receive( :globus_job_complete_file ).and_return( complete_file )
-
allow( File ).to receive( :exist? ).with( complete_file ).and_return( false )
-
end
-
1
it "return true when file doesn't exist." do
-
expect( File ).not_to receive( :delete )
-
expect( job.send( :globus_job_perform_complete_reset ) ).to eq(true )
-
end
-
end
-
end
-
-
1
context "#globus_lock" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :lock_file_tmp ) { Tempfile.new( ".test.lock.DeepBlueData_id321", globus_dir ) }
-
1
let( :current_token ) { GlobusJob.era_token }
-
1
before do
-
allow( GlobusJob ).to receive( :lock_file ).and_return( lock_file_tmp.path )
-
log_msg = "Globus: writing lock token #{current_token} to #{lock_file_tmp.path}"
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( log_msg )
-
end
-
1
after do
-
lock_file_tmp.delete
-
end
-
1
it "creates a lock file with the current token in it." do
-
expect( job.send( :globus_lock ) ).to eq( true )
-
file_lock_token = nil
-
open( lock_file_tmp.path, 'r' ) { |f| file_lock_token = f.read.chomp! }
-
expect( file_lock_token ).to eq( current_token )
-
end
-
end
-
-
1
context "#globus_locked?" do
-
1
context "If error file exists" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
before do
-
allow( GlobusJob ).to receive( :error_file_exists? ).and_return( true )
-
end
-
1
it "then return false if error file exists." do
-
expect( job.send( :globus_locked? ) ).to eq( false )
-
end
-
end
-
1
context "If lock file does not exist" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
before do
-
allow( GlobusJob ).to receive( :error_file_exists? ).and_return( false )
-
allow( File ).to receive( :exist? ).with( lock_file ).and_return( false )
-
end
-
1
it "then return false." do
-
expect( job.send( :globus_locked? ) ).to eq( false )
-
end
-
end
-
1
context "lock file exists with different token." do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :lock_file_tmp ) { Tempfile.new( ".test.lock.DeepBlueData_id321", globus_dir ) }
-
1
let( :current_token ) { GlobusJob.era_token }
-
1
let( :lock_token ) { "theToken" }
-
1
before do
-
allow( GlobusJob ).to receive( :error_file_exists? ).and_return( false )
-
allow( GlobusJob ).to receive( :lock_file ).and_return( lock_file_tmp.path )
-
open( lock_file_tmp.path, 'w' ) { |f| f << lock_token << "\n" }
-
log_msg = "Globus: testing token from #{lock_file_tmp.path}: current_token: #{current_token} == lock_token: #{lock_token}: false"
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( log_msg )
-
end
-
1
after do
-
lock_file_tmp.delete
-
end
-
1
it "then returns false when tokens are not equal." do
-
expect( job.send( :globus_locked? ) ).to eq( false )
-
end
-
end
-
1
context "lock file exists with same token." do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
let( :lock_file_tmp ) { Tempfile.new( ".test.lock.DeepBlueData_id321", globus_dir ) }
-
1
let( :current_token ) { GlobusJob.era_token }
-
1
let( :lock_token ) { GlobusJob.era_token }
-
1
before do
-
allow( GlobusJob ).to receive( :error_file_exists? ).and_return( false )
-
allow( GlobusJob ).to receive( :lock_file ).and_return(lock_file_tmp.path )
-
open( lock_file_tmp.path, 'w' ) { |f| f << lock_token << "\n" }
-
log_msg = "Globus: testing token from #{lock_file_tmp.path}: current_token: #{current_token} == lock_token: #{lock_token}: true"
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( log_msg )
-
end
-
1
after do
-
lock_file_tmp.delete
-
end
-
1
it "then returns true when tokens are equal." do
-
expect( job.send( :globus_locked? ) ).to eq( true )
-
end
-
end
-
end
-
-
1
context "#globus_unlock" do
-
1
context "when globus lock file is nil" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
before do
-
job.define_singleton_method( :set_globus_lock_file_nil ) do @globus_lock_file = nil; end
-
job.set_globus_lock_file_nil
-
end
-
1
it "then return nil" do
-
expect( job.send( :globus_unlock ) ).to eq( nil )
-
end
-
end
-
1
context "when globus lock file does not exist" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
before do
-
allow( File ).to receive( :exist? ).with( lock_file ).and_return( false )
-
end
-
1
it "then return nil" do
-
expect( File ).not_to receive( :delete )
-
expect( job.send( :globus_unlock ) ).to eq( nil )
-
end
-
end
-
1
context "when globus lock file exists" do
-
1
let( :job ) { j = described_class.new; j.perform( "id321" ); j } # rubocop:disable Style/Semicolon
-
1
before do
-
allow( File ).to receive( :exist? ).with( lock_file ).and_return( true )
-
allow( File ).to receive( :delete ).with( lock_file )
-
log_msg = "Globus: unlock by deleting file #{lock_file}"
-
allow(Rails.logger).to receive(:debug) { :failure }
-
allow( Rails.logger ).to receive( :debug ).with( log_msg )
-
end
-
1
it "then return nil" do
-
expect( job.send( :globus_unlock ) ).to eq( nil )
-
end
-
end
-
end
-
-
1
context "#target_dir_name" do
-
1
let( :job ) { described_class.new }
-
1
let( :dir ) { Pathname.new( 'aDir' ).join( 'aSubdir' ) }
-
1
context "don't create dir." do
-
1
it "returns a target base name." do
-
expect( job.send(:target_dir_name2, Pathname.new('aDir' ), "aSubdir" ) ).to eq(dir )
-
end
-
end
-
1
context "create dir if it doesn't exist." do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( dir ).and_return( false )
-
allow( Dir ).to receive( :mkdir ).with( dir )
-
end
-
1
it "returns a target base name and creates the dir." do
-
expect( job.send(:target_dir_name2, Pathname.new('aDir' ), "aSubdir", mkdir: true ) ).to eq(dir )
-
end
-
end
-
1
context "don't create dir if it exists." do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( dir ).and_return( true )
-
end
-
1
it "returns a target base name and doesn't create the dir." do
-
expect( job.send(:target_dir_name2, Pathname.new('aDir' ), "aSubdir", mkdir: false ) ).to eq(dir )
-
end
-
end
-
end
-
-
1
describe "#target_download_dir" do
-
1
let( :job ) { described_class.new }
-
1
it "returns target dowload dir name." do
-
expect( job.send(:target_download_dir2, "id321" ) ).to eq(globus_target_download_dir )
-
end
-
end
-
-
1
context "#target_prep_dir" do
-
1
let( :job ) { described_class.new }
-
1
let( :prefix ) { GlobusJob.server_prefix(str: '_') }
-
1
let( :dir ) { globus_prep_dir.join "#{prefix}DeepBlueData_id321" }
-
1
context "don't create prep dir." do
-
1
it "returns a prep dir name." do
-
expect( job.send(:target_prep_dir2, "id321", prefix: prefix ) ).to eq(dir )
-
end
-
end
-
1
context "create prep dir if it doesn't exist." do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( dir ).and_return( false )
-
allow( Dir ).to receive( :mkdir ).with( dir )
-
end
-
1
it "returns prep dir name and creates the dir." do
-
expect( job.send(:target_prep_dir2, "id321", prefix: prefix, mkdir: true ) ).to eq(dir )
-
end
-
end
-
1
context "don't create prep dir if it exists." do
-
1
it "returns prep dir name name and doesn't create the dir." do
-
expect( job.send(:target_prep_dir2, "id321", prefix: prefix, mkdir: false ) ).to eq(dir )
-
end
-
end
-
end
-
-
1
context "#target_prep_dir_tmp" do
-
1
let( :job ) { described_class.new }
-
1
let( :prefix ) { GlobusJob.server_prefix(str: '_') }
-
1
let( :dir ) { globus_prep_dir.join "#{prefix}DeepBlueData_id321_tmp" }
-
1
context "don't create tmp prep dir." do
-
1
it "returns tmp prep dir name." do
-
expect( job.send(:target_prep_tmp_dir2, "id321", prefix: prefix ) ).to eq(dir )
-
end
-
end
-
1
context "create tmp prep dir if it doesn't exist." do
-
1
before do
-
allow( Dir ).to receive( :exist? ).with( dir ).and_return( false )
-
allow( Dir ).to receive( :mkdir ).with( dir )
-
end
-
1
it "returns tmp prep dir name and creates the dir." do
-
expect( job.send(:target_prep_tmp_dir2, "id321", prefix: prefix, mkdir: true ) ).to eq(dir )
-
end
-
end
-
1
context "don't create tmp prep dir if it exists." do
-
1
it "returns tmp prep dir name name and doesn't create the dir." do
-
expect( job.send(:target_prep_tmp_dir2, "id321", prefix: prefix, mkdir: false ) ).to eq(dir )
-
end
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
# require 'uri'
-
-
1
RSpec.configure do |config|
-
1
config.filter_run_excluding globus_enabled: :true unless DeepBlueDocs::Application.config.globus_enabled
-
end
-
-
1
describe GlobusRestartAllJob, "GlobusJob globus_enabled: :true", globus_enabled: :true do # rubocop:disable RSpec/DescribeMethod
-
-
6
let( :globus_dir ) { Pathname "/tmp/deepbluedata-globus" }
-
6
let( :target_name ) { "DataCORE_Restart_All" }
-
# let( :target_name_prep_dir ) { "#{GlobusJob.server_prefix(str:'_')}#{target_name}" }
-
6
let( :globus_prep_dir ) { globus_dir.join 'prep' }
-
6
let( :job_complete_file ) { globus_prep_dir.join ".test.restarted.#{target_name}" }
-
2
let( :error_file ) { globus_prep_dir.join ".test.error.#{target_name}" }
-
2
let( :lock_file ) { globus_prep_dir.join ".test.lock.#{target_name}" }
-
-
1
describe "#perform" do
-
1
context "when can acquire lock" do
-
1
let( :job ) do
-
j = GlobusCopyJob.new
-
j.define_singleton_method( :set_id, id ) do |id|
-
@globus_concern_id = id
-
self
-
end
-
j
-
end
-
2
let( :log_prefix ) { "Globus: globus_restart_all_job " }
-
2
let( :globus_era_file ) { GlobusJob.era_token }
-
2
let( :target ) { "DataCORE_" }
-
2
let( :id00 ) { "id000" }
-
2
let( :file00 ) { "#{globus_prep_dir}/.development.lock.#{target}#{id00}" }
-
2
let( :dir00 ) { "#{globus_prep_dir}/development_#{target}#{id00}" }
-
2
let( :dir00tmp ) { "#{dir00}_tmp" }
-
2
let( :id01 ) { "id001" }
-
2
let( :file01 ) { "#{globus_prep_dir}/.test.lock.#{target}#{id01}" }
-
2
let( :dir01 ) { "#{globus_prep_dir}/test_#{target}#{id01}" }
-
2
let( :dir01tmp ) { "#{dir01}_tmp" }
-
2
let( :id02 ) { "id002" }
-
2
let( :file02 ) { "#{globus_prep_dir}/.test.error.#{target}#{id02}" }
-
1
let( :files ) do
-
1
[ globus_era_file,
-
file00,
-
dir00,
-
dir00tmp,
-
file01,
-
dir01,
-
dir01tmp,
-
file02,
-
file03,
-
dir04,
-
dir05tmp ].map( &:to_s )
-
end
-
2
let( :id03 ) { "id003" }
-
2
let( :file03 ) { "#{globus_prep_dir}/.test.lock.#{target}#{id03}" }
-
2
let( :id04 ) { "id004" }
-
2
let( :dir04 ) { "#{globus_prep_dir}/test_#{target}#{id04}" }
-
2
let( :id05 ) { "id005" }
-
2
let( :dir05tmp ) { "#{globus_prep_dir}/test_#{target}#{id05}_tmp" }
-
1
before do
-
1
File.delete lock_file if File.exist? lock_file
-
1
File.delete error_file if File.exist? error_file
-
1
File.delete job_complete_file if File.exist? job_complete_file
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
1
allow( Dir ).to receive( :glob ).with( any_args ).and_return( files )
-
1
allow( GlobusCopyJob ).to receive( :perform_later ).with( any_args )
-
end
-
1
it "calls globus block." do
-
1
described_class.perform_now
-
# expect( Rails.logger ).to have_received( :debug ).with( 'bogus so we can look at the logger output' )
-
1
expect( GlobusCopyJob ).to have_received( :perform_later ).with( id01 )
-
1
expect( GlobusCopyJob ).to have_received( :perform_later ).with( id02 )
-
1
expect( GlobusCopyJob ).to have_received( :perform_later ).with( id03 )
-
1
expect( GlobusCopyJob ).to have_received( :perform_later ).with( id04 )
-
1
expect( GlobusCopyJob ).to have_received( :perform_later ).with( id05 )
-
1
expect( GlobusCopyJob ).to have_received( :perform_later ).exactly( 5 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{log_prefix}restart all complete" )
-
# expect( Rails.logger ).to have_received( :debug ).with( 'bogus so we can look at the logger output' )
-
# expect( Rails.logger ).not_to have_received( :error )
-
1
expect( File.exist?( job_complete_file ) ).to eq( true )
-
1
expect( File.exist?( error_file ) ).to eq( false )
-
1
expect( File.exist?( lock_file ) ).to eq( false )
-
end
-
1
after do
-
1
File.delete job_complete_file if File.exist? job_complete_file
-
end
-
end
-
end
-
-
1
describe "#globus_job_complete_file" do
-
2
let( :job ) { described_class.new }
-
1
before do
-
2
job.define_singleton_method( :set_parms ) do @globus_concern_id = "Restart_All"; end
-
1
job.set_parms
-
end
-
1
it "returns the ready file name." do
-
1
expect( job.send( :globus_job_complete_file ) ).to eq( job_complete_file )
-
end
-
end
-
-
1
describe "#globus_job_complete?" do
-
4
let( :job ) { described_class.new }
-
4
let( :job_complete_msg ) { " globus job complete file #{job_complete_file}" }
-
3
let( :time_now ) { Time.now }
-
2
let( :time_before_now ) { time_now - 9.seconds }
-
-
1
before do
-
6
job.define_singleton_method( :set_parms ) do @globus_concern_id = "Restart_All"; end
-
3
job.set_parms
-
end
-
1
context "when file does not exist" do
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug )
-
1
allow( File ).to receive( :exist? ).with( job_complete_file ).and_return( false )
-
end
-
1
it "return true." do
-
1
expect( job.send( :globus_job_complete? ) ).to eq( false )
-
1
expect( Rails.logger ).to have_received( :debug ).with( job_complete_msg )
-
end
-
end
-
1
context "when file exists and time matches" do
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug )
-
1
allow( File ).to receive( :exist? ).with( job_complete_file ).and_return( true )
-
# allow( File ).to receive( :birthtime ).with( job_complete_file ).and_return( time_now )
-
1
allow( job ).to receive( :last_complete_time ).with( job_complete_file ).and_return( time_now )
-
1
allow( GlobusJob ).to receive( :era_token_time ).with( no_args ).and_return( time_now )
-
end
-
1
it "return true." do
-
1
expect( job.send( :globus_job_complete? ) ).to eq( true )
-
1
expect( Rails.logger ).to have_received( :debug ).with( job_complete_msg )
-
# expect( Rails.logger ).to have_received( :debug ).with( 'bogus' )
-
end
-
end
-
1
context "when file exists and time does not match" do
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug )
-
1
allow( File ).to receive( :exist? ).with( job_complete_file ).and_return( true )
-
# allow( File ).to receive( :birthtime ).with( job_complete_file ).and_return( time_before_now )
-
1
allow( job ).to receive( :last_complete_time ).with( job_complete_file ).and_return( time_before_now )
-
1
allow( GlobusJob ).to receive( :era_token_time ).with( no_args ).and_return( time_now )
-
end
-
1
it "return false." do
-
1
expect( job.send( :globus_job_complete? ) ).to eq( false )
-
1
expect( Rails.logger ).to have_received( :debug ).with( job_complete_msg )
-
# expect( Rails.logger ).to have_received( :debug ).with( 'bogus' )
-
end
-
end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
RSpec.describe DeepblueMailer do
-
-
1
describe '#send_an_email' do
-
1
before {
-
1
allow(subject).to receive(:mail).with(to: "to", from: "from", subject: "subject", body: "body")
-
}
-
-
1
it "calls email function" do
-
1
expect(subject).to receive(:mail).with(to: "to", from: "from", subject: "subject", body: "body")
-
-
1
subject.send_an_email to: "to", from: "from", subject: "subject", body: "body"
-
end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
RSpec.describe Ability do
-
2
let(:user) { FactoryBot.create :user }
-
3
let(:options) { {} }
-
3
let(:ability) { described_class.new(user, options) }
-
-
1
describe '#can_deposit?' do
-
1
context 'when neither an admin nor depositor' do
-
1
it 'returns false' do
-
1
expect(ability.admin?).to be false
-
1
expect(ability.depositor?).to be false
-
1
expect(ability.can? :create, DataSet).to be false
-
end
-
end
-
1
context 'when a depositor' do
-
2
let(:admin_set) { AdminSet.find(AdminSet.find_or_create_default_admin_set_id) }
-
1
before do
-
# creates permission template and depositor permissions
-
1
Hyrax::AdminSetCreateService.new(admin_set: admin_set, creating_user: user).create
-
end
-
1
it 'returns true' do
-
expect(ability.admin?).to be false
-
expect(ability.depositor?).to be true
-
expect(ability.can? :create, DataSet).to be true
-
end
-
end
-
1
context 'when an admin' do
-
2
let(:user) { FactoryBot.create :admin }
-
1
it 'returns true' do
-
1
expect(ability.admin?).to be true
-
1
expect(ability.depositor?).to be false
-
1
expect(ability.can? :create, DataSet).to be true
-
end
-
end
-
end
-
end
-
1
require 'rails_helper'
-
-
1
RSpec.describe Collection do
-
1
it "has tests" do
-
1
skip "Add your tests here"
-
end
-
end
-
# frozen_string_literal: true
-
-
1
class CurationConcernMock
-
1
include ::Deepblue::AbstractEventBehavior
-
end
-
-
1
RSpec.describe Deepblue::AbstractEventBehavior do
-
-
9
let( :event ) { 'the_event' }
-
9
let( :id ) { 'id1234' }
-
5
let( :behavior ) { 'some_behavior' }
-
3
let( :key ) { "#{id}.#{event}" }
-
5
let( :key_with_behavior ) { "#{id}.#{event}.#{behavior}" }
-
5
let( :cache_value ) { 'the cache value ' }
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_CHARACTERIZE ).to eq 'characterize'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_CHILD_ADD ).to eq 'child_add'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_CHILD_REMOVE ).to eq 'child_remove'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_CREATE ).to eq 'create'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_CREATE_DERIVATIVE ).to eq 'create_derivative'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_DESTROY ).to eq 'destroy'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_DOWNLOAD ).to eq 'download'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_EMBARGO ).to eq 'embargo'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_FIXITY_CHECK ).to eq 'fixity_check'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_GLOBUS ).to eq 'globus'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_INGEST ).to eq 'ingest'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_MIGRATE ).to eq 'migrate'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_MINT_DOI ).to eq 'mint_doi'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_PUBLISH ).to eq 'publish'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_TOMBSTONE ).to eq 'tombstone'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_UNEMBARGO ).to eq 'unembargo'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_UNPUBLISH ).to eq 'unpublish'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_UPDATE ).to eq 'update'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_UPDATE_AFTER ).to eq 'update_after'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_UPDATE_BEFORE ).to eq 'update_before'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_UPDATE_VERSION ).to eq 'update_version'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_UPLOAD ).to eq 'upload'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_VIRUS_SCAN ).to eq 'virus_scan'
-
1
expect( Deepblue::AbstractEventBehavior::EVENT_WORKFLOW ).to eq 'workflow'
-
1
expect( Deepblue::AbstractEventBehavior::EVENTS ).to eq [
-
Deepblue::AbstractEventBehavior::EVENT_CHARACTERIZE,
-
Deepblue::AbstractEventBehavior::EVENT_CHILD_ADD,
-
Deepblue::AbstractEventBehavior::EVENT_CHILD_REMOVE,
-
Deepblue::AbstractEventBehavior::EVENT_CREATE,
-
Deepblue::AbstractEventBehavior::EVENT_CREATE_DERIVATIVE,
-
Deepblue::AbstractEventBehavior::EVENT_DESTROY,
-
Deepblue::AbstractEventBehavior::EVENT_DOWNLOAD,
-
Deepblue::AbstractEventBehavior::EVENT_EMBARGO,
-
Deepblue::AbstractEventBehavior::EVENT_FIXITY_CHECK,
-
Deepblue::AbstractEventBehavior::EVENT_GLOBUS,
-
Deepblue::AbstractEventBehavior::EVENT_INGEST,
-
Deepblue::AbstractEventBehavior::EVENT_MIGRATE,
-
Deepblue::AbstractEventBehavior::EVENT_MINT_DOI,
-
Deepblue::AbstractEventBehavior::EVENT_PUBLISH,
-
Deepblue::AbstractEventBehavior::EVENT_TOMBSTONE,
-
Deepblue::AbstractEventBehavior::EVENT_UNEMBARGO,
-
Deepblue::AbstractEventBehavior::EVENT_UNPUBLISH,
-
Deepblue::AbstractEventBehavior::EVENT_UPDATE,
-
Deepblue::AbstractEventBehavior::EVENT_UPDATE_AFTER,
-
Deepblue::AbstractEventBehavior::EVENT_UPDATE_BEFORE,
-
Deepblue::AbstractEventBehavior::EVENT_UPDATE_VERSION,
-
Deepblue::AbstractEventBehavior::EVENT_UPLOAD,
-
Deepblue::AbstractEventBehavior::EVENT_VIRUS_SCAN,
-
Deepblue::AbstractEventBehavior::EVENT_WORKFLOW
-
]
-
1
expect( Deepblue::AbstractEventBehavior::IGNORE_BLANK_KEY_VALUES ).to eq true
-
1
expect( Deepblue::AbstractEventBehavior::USE_BLANK_KEY_VALUES ).to eq false
-
end
-
end
-
-
1
describe '.event_attributes_cache_exist?' do
-
3
subject { CurationConcernMock.new }
-
1
context 'with behavior' do
-
1
before do
-
1
allow( Rails.cache ).to receive( :exist? ).with( key_with_behavior ).and_return true
-
end
-
1
it do
-
1
expect( subject.event_attributes_cache_exist?( event: event, id: id, behavior: behavior ) ).to eq true
-
end
-
end
-
1
context 'without behavior' do
-
1
before do
-
1
allow( Rails.cache ).to receive( :exist? ).with( key ).and_return true
-
end
-
1
it do
-
1
expect( subject.event_attributes_cache_exist?( event: event, id: id ) ).to eq true
-
end
-
end
-
end
-
-
1
describe '.event_attributes_cache_fetch' do
-
3
subject { CurationConcernMock.new }
-
1
context 'with behavior' do
-
1
before do
-
1
allow( Rails.cache ).to receive( :fetch ).with( key_with_behavior ).and_return cache_value
-
end
-
1
it do
-
1
expect( subject.event_attributes_cache_fetch( event: event, id: id, behavior: behavior ) ).to eq cache_value
-
end
-
end
-
1
context 'without behavior' do
-
2
let( :key ) { "#{id}.#{event}" }
-
1
before do
-
1
allow( Rails.cache ).to receive( :fetch ).with( key ).and_return cache_value
-
end
-
1
it do
-
1
expect( subject.event_attributes_cache_fetch( event: event, id: id ) ).to eq cache_value
-
end
-
end
-
end
-
-
1
describe '.event_attributes_cache_key' do
-
3
subject { CurationConcernMock.new }
-
1
context 'with behavior' do
-
2
let( :result ) { key_with_behavior }
-
1
it do
-
1
expect( subject.event_attributes_cache_key( event: event, id: id, behavior: behavior ) ).to eq result
-
end
-
end
-
1
context 'without behavior' do
-
2
let( :result ) { key }
-
1
it do
-
1
expect( subject.event_attributes_cache_key( event: event, id: id, behavior: nil ) ).to eq result
-
1
expect( subject.event_attributes_cache_key( event: event, id: id, behavior: '' ) ).to eq result
-
1
expect( subject.event_attributes_cache_key( event: event, id: id ) ).to eq result
-
end
-
end
-
end
-
-
1
describe '.event_attributes_cache_write' do
-
3
subject { CurationConcernMock.new }
-
1
context 'with behavior' do
-
1
before do
-
1
allow( Rails.cache ).to receive( :write ).with( key_with_behavior, cache_value )
-
end
-
1
it do
-
1
subject.event_attributes_cache_write( event: event, id: id, attributes: cache_value, behavior: behavior )
-
end
-
end
-
1
context 'without behavior' do
-
2
let( :key ) { "#{id}.#{event}" }
-
1
before do
-
1
allow( Rails.cache ).to receive( :write ).with( key, cache_value )
-
end
-
1
it do
-
1
subject.event_attributes_cache_write( event: event, id: id, attributes: cache_value )
-
end
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require_relative '../../../../app/models/concerns/deepblue/abstract_event_behavior'
-
1
require_relative '../../../../app/models/concerns/deepblue/metadata_behavior'
-
-
1
class CurationConcernEmptyMock
-
1
include ::Deepblue::MetadataBehavior
-
end
-
-
1
class CurationConcernMock
-
1
include ::Deepblue::MetadataBehavior
-
-
1
def description
-
4
['The Description']
-
end
-
-
1
def id
-
10
'id123'
-
end
-
-
1
def title
-
8
['The Title', 'Part 2']
-
end
-
-
1
def visiblity
-
Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE
-
end
-
-
1
def metadata_keys_all
-
2
%i[ id title description ]
-
end
-
-
1
def metadata_keys_brief
-
2
%i[ id title ]
-
end
-
-
1
def metadata_hash_override( key:, ignore_blank_values:, key_values: )
-
10
value = nil
-
10
handled = case key.to_s
-
when 'description'
-
2
value = description
-
2
true
-
else
-
8
false
-
end
-
10
return false unless handled
-
2
if ignore_blank_values
-
key_values[key] = value if value.present?
-
else
-
2
key_values[key] = value
-
end
-
2
return true
-
end
-
-
end
-
-
1
RSpec.describe Deepblue::AbstractEventBehavior do
-
-
2
let( :empty_mock ) { CurationConcernEmptyMock.new }
-
5
let( :mock ) { CurationConcernMock.new }
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( Deepblue::MetadataBehavior::METADATA_FIELD_SEP ).to eq '; '
-
1
expect( Deepblue::MetadataBehavior::METADATA_REPORT_DEFAULT_DEPTH ).to eq 2
-
1
expect( Deepblue::MetadataBehavior::METADATA_REPORT_DEFAULT_FILENAME_POST ).to eq '_metadata_report'
-
1
expect( Deepblue::MetadataBehavior::METADATA_REPORT_DEFAULT_FILENAME_EXT ).to eq '.txt'
-
end
-
end
-
-
1
describe 'default values' do
-
1
it do
-
1
expect( empty_mock.metadata_keys_all ).to eq []
-
1
expect( empty_mock.metadata_keys_brief ).to eq []
-
1
expect( empty_mock.metadata_hash_override( key: 'key', ignore_blank_values: false, key_values: [ key: 'value' ] ) ).to eq false
-
1
expect( empty_mock.metadata_report_label_override(metadata_key: 'key', metadata_value: 'value' ) ).to eq nil
-
1
ignore_blank_key_values, keys = empty_mock.metadata_report_keys
-
1
expect( ignore_blank_key_values ).to eq ::Deepblue::AbstractEventBehavior::IGNORE_BLANK_KEY_VALUES
-
1
expect( keys ).to eq []
-
1
expect( empty_mock.metadata_report_contained_objects ).to eq []
-
1
expect( empty_mock.metadata_report_title_pre ).to eq ''
-
1
expect( empty_mock.metadata_report_title_field_sep ).to eq ' '
-
end
-
end
-
-
1
describe 'metadata_hash' do
-
2
let( :empty_hash ) { {} }
-
2
let( :expected_hash_all ) { { id: mock.id, title: mock.title, description: mock.description } }
-
2
let( :expected_hash_brief ) { { id: mock.id, title: mock.title } }
-
2
let( :expected_kv_hash_all ) { { key: 'value', id: mock.id, title: mock.title, description: mock.description } }
-
2
let( :expected_kv_hash_brief ) { { key: 'value', id: mock.id, title: mock.title } }
-
1
context 'empty' do
-
1
it do
-
1
expect( mock.metadata_hash( metadata_keys: [], ignore_blank_values: false ) ).to eq empty_hash
-
1
expect( mock.metadata_hash( metadata_keys: [], ignore_blank_values: false, **empty_hash ) ).to eq empty_hash
-
end
-
end
-
1
context 'returns correct value for id, title' do
-
1
it do
-
1
expect( mock.metadata_hash( metadata_keys: mock.metadata_keys_brief, ignore_blank_values: false ) ).to eq expected_hash_brief
-
1
expect( mock.metadata_hash( metadata_keys: mock.metadata_keys_all, ignore_blank_values: false ) ).to eq expected_hash_all
-
1
kv_hash = { key: 'value' }
-
1
expect( mock.metadata_hash( metadata_keys: mock.metadata_keys_brief, ignore_blank_values: false, **kv_hash ) ).to eq expected_kv_hash_brief
-
1
kv_hash = { key: 'value' }
-
1
expect( mock.metadata_hash( metadata_keys: mock.metadata_keys_all, ignore_blank_values: false, **kv_hash ) ).to eq expected_kv_hash_all
-
end
-
end
-
end
-
-
1
describe 'metadata_report_filename' do
-
3
let( :pathname_dir ) { "/some/path" }
-
3
let( :filename_pre ) { "pre_" }
-
1
context 'basic parms' do
-
1
it do
-
1
expect( mock.metadata_report_filename( pathname_dir: Pathname.new( pathname_dir ),
-
filename_pre: filename_pre ) ).to eq Pathname.new "/some/path/pre_id123_metadata_report.txt"
-
end
-
end
-
1
context 'all parms' do
-
1
it do
-
1
expect( mock.metadata_report_filename( pathname_dir: Pathname.new( pathname_dir ),
-
filename_pre: filename_pre,
-
filename_post: "_post",
-
filename_ext: ".ext" ) ).to eq Pathname.new "/some/path/pre_id123_post.ext"
-
end
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
-
1
RSpec.describe DataSet do
-
-
23
let( :authoremail ) { 'authoremail@umich.edu' }
-
23
let( :creator ) { 'Creator, A' }
-
6
let( :current_user ) { 'user@umich.edu' }
-
23
let( :date_created ) { '2018-02-28' }
-
17
let( :depositor ) { authoremail }
-
23
let( :description ) { 'The Description' }
-
23
let( :id ) { '0123458678' }
-
17
let( :methodology ) { 'The Methodology' }
-
21
let( :rights_license ) { 'The Rights License' }
-
23
let( :title ) { 'The Title' }
-
3
let( :visibility_private ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
-
12
let( :visibility_public ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
-
1
let( :metadata_keys_all ) {
-
4
%i[
-
access_deepblue
-
admin_set_id
-
authoremail
-
creator
-
curation_notes_admin
-
curation_notes_user
-
date_coverage
-
date_created
-
date_modified
-
date_published
-
date_updated
-
depositor
-
description
-
doi
-
file_set_ids
-
fundedby
-
fundedby_other
-
grantnumber
-
keyword
-
language
-
location
-
methodology
-
prior_identifier
-
referenced_by
-
rights_license
-
rights_license_other
-
subject_discipline
-
title
-
tombstone
-
total_file_count
-
total_file_size
-
total_file_size_human_readable
-
visibility
-
]
-
}
-
1
let( :metadata_keys_brief ) {
-
1
%i[
-
authoremail
-
title
-
visibility
-
]
-
}
-
1
let( :metadata_keys_update ) {
-
2
%i[
-
authoremail
-
title
-
visibility
-
]
-
}
-
2
let( :exp_class_name ) { 'DataSet' }
-
1
let( :exp_location ) { "/concern/data_sets/#{id}" }
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( ::Deepblue::DoiBehavior::DOI_PENDING ).to eq 'doi_pending'
-
end
-
end
-
-
1
describe 'metadata overrides' do
-
1
before do
-
4
subject.id = id
-
4
subject.authoremail = authoremail
-
4
subject.title = [title]
-
4
subject.creator = [creator]
-
4
subject.depositor = depositor
-
4
subject.date_created = date_created
-
4
subject.description = [description]
-
4
subject.methodology = methodology
-
4
subject.rights_license = rights_license
-
4
subject.visibility = visibility_public
-
end
-
-
1
it 'provides file_set_ids' do
-
1
key = :file_set_ids
-
1
exp_value = []
-
1
key_values = { test: 'testing' }
-
1
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq true
-
expect( key_values[key] ).to eq exp_value
-
expect( key_values[:test] ).to eq 'testing'
-
expect( key_values.size ).to eq 2
-
end
-
-
1
it 'provides total_file_size' do
-
1
key = :total_file_size
-
1
exp_value = nil
-
1
key_values = { test: 'testing' }
-
1
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq true
-
1
expect( key_values[key] ).to eq exp_value
-
1
expect( key_values[:test] ).to eq 'testing'
-
1
expect( key_values.size ).to eq 2
-
end
-
-
1
it 'provides total_file_size_human_readable' do
-
1
key = :total_file_size_human_readable
-
1
exp_value = nil
-
1
key_values = { test: 'testing' }
-
1
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq true
-
1
expect( key_values[key] ).to eq exp_value
-
1
expect( key_values[:test] ).to eq 'testing'
-
1
expect( key_values.size ).to eq 2
-
end
-
-
1
it 'does not provide some arbritrary metadata' do
-
1
key = :some_arbritrary_metadata
-
1
key_values = { test: 'testing' }
-
1
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq false
-
1
expect( key_values[:test] ).to eq 'testing'
-
1
expect( key_values.size ).to eq 1
-
end
-
-
end
-
-
1
describe 'provenance metadata overrides' do
-
1
before do
-
3
subject.id = id
-
3
subject.authoremail = authoremail
-
3
subject.title = [title]
-
3
subject.creator = [creator]
-
3
subject.depositor = depositor
-
3
subject.date_created = date_created
-
3
subject.description = [description]
-
3
subject.methodology = methodology
-
3
subject.rights_license = rights_license
-
3
subject.visibility = visibility_public
-
end
-
-
1
it 'provides file_set_ids' do
-
1
prov_key_values = { test: 'testing' }
-
1
attribute = :file_set_ids
-
1
ignore_blank_key_values = false
-
1
expect( subject.map_provenance_attributes_override!( event: '',
-
attribute: attribute,
-
ignore_blank_key_values: ignore_blank_key_values,
-
prov_key_values: prov_key_values ) ).to eq true
-
expect( prov_key_values[:file_set_ids] ).to eq []
-
expect( prov_key_values[:test] ).to eq 'testing'
-
expect( prov_key_values.size ).to eq 2
-
end
-
-
1
it 'provides visibility' do
-
1
prov_key_values = { test: 'testing' }
-
1
attribute = :visibility
-
1
ignore_blank_key_values = false
-
1
expect( subject.map_provenance_attributes_override!( event: '',
-
attribute: attribute,
-
ignore_blank_key_values: ignore_blank_key_values,
-
prov_key_values: prov_key_values ) ).to eq true
-
1
expect( prov_key_values[:visibility] ).to eq visibility_public
-
1
expect( prov_key_values[:test] ).to eq 'testing'
-
1
expect( prov_key_values.size ).to eq 2
-
end
-
-
1
it 'does not provide some arbritrary metadata' do
-
1
prov_key_values = { test: 'testing' }
-
1
attribute = :some_arbritrary_metadata
-
1
ignore_blank_key_values = false
-
1
expect( subject.map_provenance_attributes_override!( event: '',
-
attribute: attribute,
-
ignore_blank_key_values: ignore_blank_key_values,
-
prov_key_values: prov_key_values ) ).to eq false
-
1
expect( prov_key_values[:test] ).to eq 'testing'
-
1
expect( prov_key_values.size ).to eq 1
-
end
-
-
end
-
-
1
describe 'properties' do
-
## TODO
-
# it 'has private visibility when created' do
-
# expect(subject.visibility).to eq visibility_private
-
# end
-
-
1
it 'has subject property' do
-
1
expect(subject).to respond_to(:subject_discipline)
-
end
-
-
1
it 'has identifier properties' do
-
1
expect(subject).to respond_to(:doi)
-
1
expect(subject).to respond_to(:hdl)
-
end
-
-
1
describe 'resource type' do
-
1
it 'is set during initialization' do
-
1
expect(subject.resource_type).to eq ['Dataset']
-
end
-
end
-
end
-
-
1
describe 'provenance constants' do
-
-
1
it 'has all metadata elements defined' do
-
1
expect( subject.attributes_all_for_provenance ).to eq metadata_keys_all
-
end
-
-
1
it 'has brief metadata elements defined' do
-
1
expect( subject.attributes_brief_for_provenance ).to eq metadata_keys_brief
-
end
-
-
1
it 'has update metadata elements defined' do
-
1
expect( subject.attributes_update_for_provenance ).to eq metadata_keys_update
-
end
-
-
end
-
-
1
describe 'provenance mint doi' do
-
1
let( :exp_despositor ) { depositor }
-
1
let( :exp_event ) { Deepblue::AbstractEventBehavior::EVENT_MINT_DOI }
-
1
let( :exp_visibility ) { visibility_public }
-
-
1
before do
-
2
subject.id = id
-
2
subject.authoremail = authoremail
-
2
subject.title = [title]
-
2
subject.creator = [creator]
-
2
subject.date_created = date_created
-
2
subject.depositor = depositor
-
2
subject.description = [description]
-
2
subject.methodology = methodology
-
2
subject.rights_license = rights_license
-
end
-
-
1
it 'uses all attributes and keeps blank ones' do
-
1
attributes, ignore_blank_key_values = subject.attributes_for_provenance_mint_doi
-
1
expect( ignore_blank_key_values ).to eq Deepblue::AbstractEventBehavior::USE_BLANK_KEY_VALUES
-
1
expect( attributes ).to eq metadata_keys_all
-
end
-
-
1
it 'is minted' do
-
1
prov_logger_received = nil
-
1
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
before = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
expect( subject.provenance_mint_doi( current_user: current_user ) ).to eq true
-
after = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
validate_prov_logger_received( prov_logger_received: prov_logger_received,
-
size: 39,
-
before: before,
-
after: after,
-
exp_event: exp_event,
-
exp_class_name: exp_class_name,
-
exp_id: id,
-
exp_authoremail: authoremail,
-
exp_creator: [creator],
-
exp_date_created: "2018-02-28",
-
exp_description: [description],
-
exp_depositor: exp_despositor,
-
exp_location: exp_location,
-
exp_methodology: methodology,
-
exp_rights_license: rights_license,
-
exp_visibility: exp_visibility )
-
end
-
-
end
-
-
1
describe 'provenance publish' do
-
1
let( :exp_despositor ) { depositor }
-
1
let( :exp_event ) { Deepblue::AbstractEventBehavior::EVENT_PUBLISH }
-
-
1
before do
-
2
subject.id = id
-
2
subject.authoremail = authoremail
-
2
subject.title = [title]
-
2
subject.creator = [creator]
-
2
subject.date_created = date_created
-
2
subject.depositor = depositor
-
2
subject.description = [description]
-
2
subject.methodology = methodology
-
2
subject.rights_license = rights_license
-
2
subject.visibility = visibility_public
-
end
-
-
1
it 'uses all attributes and keeps blank ones' do
-
1
attributes, ignore_blank_key_values = subject.attributes_for_provenance_publish
-
1
expect( ignore_blank_key_values ).to eq Deepblue::AbstractEventBehavior::USE_BLANK_KEY_VALUES
-
1
expect( attributes ).to eq metadata_keys_all
-
end
-
-
1
it 'is published' do
-
1
prov_logger_received = nil
-
1
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
before = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
expect( subject.provenance_publish( current_user: current_user ) ).to eq true
-
after = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
validate_prov_logger_received( prov_logger_received: prov_logger_received,
-
size: 40,
-
before: before,
-
after: after,
-
exp_event: exp_event,
-
exp_class_name: exp_class_name,
-
exp_id: id,
-
exp_authoremail: authoremail,
-
exp_creator: [creator],
-
exp_date_created: "2018-02-28",
-
exp_description: [description],
-
exp_depositor: exp_despositor,
-
exp_location: exp_location,
-
exp_message: '',
-
exp_methodology: methodology,
-
exp_rights_license: rights_license,
-
exp_visibility: visibility_public )
-
end
-
-
end
-
-
1
describe 'provenance unpublish' do
-
1
let( :exp_despositor ) { depositor }
-
1
let( :exp_event ) { Deepblue::AbstractEventBehavior::EVENT_UNPUBLISH }
-
-
1
before do
-
2
subject.id = id
-
2
subject.authoremail = authoremail
-
2
subject.title = [title]
-
2
subject.creator = [creator]
-
2
subject.date_created = date_created
-
2
subject.depositor = depositor
-
2
subject.description = [description]
-
2
subject.methodology = methodology
-
2
subject.rights_license = rights_license
-
2
subject.visibility = visibility_private
-
end
-
-
1
it 'uses all attributes and keeps blank ones' do
-
1
attributes, ignore_blank_key_values = subject.attributes_for_provenance_unpublish
-
1
expect( ignore_blank_key_values ).to eq Deepblue::AbstractEventBehavior::USE_BLANK_KEY_VALUES
-
1
expect( attributes ).to eq metadata_keys_all
-
end
-
-
1
it 'is published' do
-
1
prov_logger_received = nil
-
1
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
before = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
expect( subject.provenance_unpublish( current_user: current_user ) ).to eq true
-
after = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
validate_prov_logger_received( prov_logger_received: prov_logger_received,
-
size: 39,
-
before: before,
-
after: after,
-
exp_event: exp_event,
-
exp_class_name: exp_class_name,
-
exp_id: id,
-
exp_authoremail: authoremail,
-
exp_creator: [creator],
-
exp_date_created: "2018-02-28",
-
exp_description: [description],
-
exp_depositor: exp_despositor,
-
exp_location: exp_location,
-
exp_methodology: methodology,
-
exp_rights_license: rights_license,
-
exp_visibility: visibility_private )
-
end
-
-
end
-
-
1
describe 'it requires core metadata' do
-
1
before do
-
6
subject.id = id
-
6
subject.authoremail = authoremail
-
6
subject.title = [title]
-
6
subject.creator = [creator]
-
6
subject.date_created = date_created
-
6
subject.description = [description]
-
6
subject.rights_license = rights_license
-
end
-
-
1
it 'validates authoremail' do
-
1
subject.authoremail = nil
-
1
expect(subject).not_to be_valid
-
end
-
-
1
it 'validates creator' do
-
1
subject.creator = []
-
1
expect(subject).not_to be_valid
-
end
-
-
1
it 'validates date_created' do
-
1
subject.date_created = nil
-
1
expect(subject).not_to be_valid
-
end
-
-
1
it 'validates description' do
-
1
subject.description = [description]
-
1
expect(subject).not_to be_valid
-
end
-
-
1
it 'validates rights_license' do
-
1
subject.rights_license = nil
-
1
expect(subject).not_to be_valid
-
end
-
-
1
it 'validates title' do
-
1
subject.title = []
-
1
expect(subject).not_to be_valid
-
end
-
end
-
-
1
describe 'it can be tombstoned' do
-
2
let( :epitaph ) { 'The reason for being tombstoned.' }
-
1
let( :depositor_at_tombstone ) { depositor }
-
1
let( :visibility_at_tombstone ) { visibility_public }
-
1
let( :exp_event ) { Deepblue::AbstractEventBehavior::EVENT_TOMBSTONE }
-
1
let( :exp_depositor ) { depositor }
-
1
let( :exp_visibility ) { visibility_private }
-
-
1
before do
-
1
subject.id = id
-
1
subject.authoremail = authoremail
-
1
subject.title = [title]
-
1
subject.creator = [creator]
-
1
subject.depositor = depositor
-
1
subject.date_created = date_created
-
1
subject.description = [description]
-
1
subject.methodology = methodology
-
1
subject.rights_license = rights_license
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
-
1
it 'is tombstoned' do
-
1
prov_logger_received = nil
-
1
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
before = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
expect( subject.entomb!( epitaph, current_user ) ).to eq true
-
after = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
validate_prov_logger_received( prov_logger_received: prov_logger_received,
-
size: 42,
-
before: before,
-
after: after,
-
exp_event: exp_event,
-
exp_class_name: exp_class_name,
-
exp_id: id,
-
exp_authoremail: authoremail,
-
exp_creator: [creator],
-
exp_date_created: "2018-02-28",
-
exp_description: [description],
-
exp_depositor: exp_depositor,
-
exp_location: exp_location,
-
exp_methodology: methodology,
-
exp_rights_license: rights_license,
-
exp_tombstone: [epitaph],
-
exp_visibility: exp_visibility,
-
depositor_at_tombstone: depositor_at_tombstone,
-
visibility_at_tombstone: visibility_at_tombstone )
-
end
-
-
end
-
-
1
describe 'provenance update' do
-
1
let( :exp_despositor ) { depositor }
-
2
let( :exp_event ) { Deepblue::AbstractEventBehavior::EVENT_UPDATE }
-
2
let( :methodology_new ) { 'The New Methodology' }
-
1
let( :methodology_old ) { 'The Old Methodology' }
-
3
let( :rights_license ) { 'The Rights License' }
-
3
let( :subject_discipline ) { 'The Subject Discipline' }
-
1
let( :form_params ) do
-
1
{ "title": [title, ""],
-
"creator": [creator, ""],
-
"authoremail": authoremail,
-
"methodology": methodology_new,
-
"description": [description, ""],
-
"rights_license": rights_license,
-
"subject_discipline": [subject_discipline, ""],
-
"fundedby": "",
-
"fundedby_other": "",
-
"grantnumber": "",
-
"keyword": [""],
-
"language": [""],
-
"referenced_by": [""],
-
"member_of_collection_ids": "",
-
"find_child_work": "",
-
"permissions_attributes": { "0": { "access": "edit", "id": "197055dd-3e5e-4714-9878-8620f2195428/39/2e/47/ca/392e47ca-b01b-4c3f-afb9-9ddb537fdacc" } },
-
"visibility_during_embargo": "restricted",
-
"embargo_release_date": "2018-06-30",
-
"visibility_after_embargo": "open",
-
"visibility_during_lease": "private",
-
"lease_expiration_date": "2018-06-30",
-
"visibility_after_lease": "open",
-
"visibility": visibility_public,
-
"version": "W/\"591319c1fdd3c69832f55e8fbbef903a4a0381a5\"",
-
"date_coverage": "",
-
"curation_notes_admin": [""],
-
"curation_notes_user": [""] }
-
end
-
2
let( :expected_attr_key_values ) { { UpdateAttribute_methodology: { attribute: :methodology, old_value: methodology, new_value: methodology_new } } }
-
2
let( :expected_added_key_values ) { { UpdateAttribute_methodology: { "attribute" => "methodology", "old_value" => "The Methodology", "new_value" => "The New Methodology" } } }
-
-
1
before do
-
2
subject.id = id
-
2
subject.authoremail = authoremail
-
2
subject.title = [title]
-
2
subject.creator = [creator]
-
2
subject.date_created = date_created
-
2
subject.depositor = depositor
-
2
subject.description = [description]
-
2
subject.methodology = methodology
-
2
subject.rights_license = rights_license
-
2
subject.subject_discipline = [subject_discipline]
-
2
subject.visibility = visibility_public
-
end
-
-
1
it 'uses update attributes and discards blank ones' do
-
1
attributes, ignore_blank_key_values = subject.attributes_for_provenance_update
-
1
expect( ignore_blank_key_values ).to eq Deepblue::AbstractEventBehavior::IGNORE_BLANK_KEY_VALUES
-
1
expect( attributes ).to eq metadata_keys_update
-
end
-
-
1
it 'logs provenance for update' do
-
1
attr_key_values = subject.provenance_log_update_before( form_params: form_params )
-
1
expect( attr_key_values ).to eq expected_attr_key_values
-
1
subject.methodology = methodology_new
-
-
1
prov_logger_received = nil
-
2
allow( PROV_LOGGER ).to receive( :info ) { |msg| prov_logger_received = msg }
-
1
before = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
subject.provenance_log_update_after( current_user: current_user, update_attr_key_values: attr_key_values )
-
1
after = Deepblue::ProvenanceHelper.to_log_format_timestamp Time.now
-
1
validate_prov_logger_received( prov_logger_received: prov_logger_received,
-
size: 10,
-
before: before,
-
after: after,
-
exp_event: exp_event,
-
exp_class_name: exp_class_name,
-
exp_id: id,
-
exp_authoremail: authoremail,
-
exp_total_file_count: nil,
-
exp_total_file_size: nil,
-
exp_total_file_size_human_readable: nil,
-
exp_visibility: visibility_public,
-
**expected_added_key_values )
-
end
-
-
end
-
-
1
def validate_expected( rv_key_values, key, exp_value )
-
36
key = key.to_s
-
36
expect( rv_key_values[key] ).to eq exp_value if exp_value.present?
-
# the rv_key_values.key?(key) seems to have semantically changed in ruby 2.5, so skip this check until a
-
# replacement can be figured out.
-
# expect( rv_key_values.key?(key) ).to eq false if exp_value.nil?
-
end
-
-
1
def validate_prov_logger_received( prov_logger_received:,
-
size:,
-
print_all_key_values: false,
-
before: nil,
-
after: nil,
-
exp_timestamp: nil,
-
exp_time_zone: DeepBlueDocs::Application.config.timezone_zone,
-
exp_event:,
-
exp_event_note: nil,
-
exp_class_name:,
-
exp_id:,
-
exp_access_deepblue: '',
-
exp_admin_set_id: '',
-
exp_authoremail: '',
-
exp_creator: [],
-
exp_curation_notes_admin: [],
-
exp_curation_notes_user: [],
-
exp_date_coverage: '',
-
exp_date_created: '',
-
exp_date_modified: '',
-
exp_date_published: '',
-
exp_date_updated: [],
-
exp_depositor: '',
-
exp_description: [],
-
exp_doi: '',
-
exp_fundedby: '',
-
exp_fundedby_other: '',
-
exp_grantnumber: '',
-
exp_referenced_by: [],
-
exp_keyword: [],
-
exp_language: [],
-
exp_location: '',
-
exp_message: '',
-
exp_methodology: '',
-
exp_prior_identifier: [],
-
exp_rights_license: '',
-
exp_rights_license_other: '',
-
exp_subject_discipline: [],
-
exp_title: [],
-
exp_tombstone: [],
-
exp_total_file_count: 0,
-
exp_total_file_size: '',
-
exp_total_file_size_human_readable: '',
-
exp_visibility: '',
-
**added_prov_key_values )
-
-
1
expect( prov_logger_received ).to be_a String
-
rv_timestamp,
-
rv_event,
-
rv_event_note,
-
rv_class_name,
-
rv_id,
-
1
rv_key_values = Deepblue::ProvenanceHelper.parse_log_line prov_logger_received
-
1
expect( rv_timestamp ).to be_between( before, after ) if exp_timestamp.nil?
-
1
expect( rv_timestamp ).to eq exp_timestamp if exp_timestamp.present?
-
1
expect( rv_event ).to eq exp_event
-
1
expect( rv_event_note ).to eq exp_event_note if exp_event_note.present?
-
1
expect( rv_event_note ).to eq '' if exp_event_note.nil?
-
1
expect( rv_class_name ).to eq exp_class_name
-
1
expect( rv_id ).to eq exp_id
-
1
rv_key_values.each_pair { |key, value| puts "#{key},#{value}" } if print_all_key_values
-
1
expect( rv_key_values['event'] ).to eq exp_event
-
1
expect( rv_key_values['timestamp'] ).to be_between( before, after ) if before.present? && after.present?
-
1
expect( rv_key_values['timestamp'] ).to eq exp_timestamp if exp_timestamp.present?
-
1
expect( rv_key_values['time_zone'] ).to eq exp_time_zone if exp_time_zone.present?
-
1
validate_expected( rv_key_values, :event_note, exp_event_note )
-
1
validate_expected( rv_key_values, :class_name, exp_class_name )
-
1
validate_expected( rv_key_values, :id, exp_id )
-
1
validate_expected( rv_key_values, :admin_set_id, exp_admin_set_id )
-
1
validate_expected( rv_key_values, :authoremail, exp_authoremail )
-
1
validate_expected( rv_key_values, :creator, exp_creator )
-
1
validate_expected( rv_key_values, :curation_notes_admin, exp_curation_notes_admin )
-
1
validate_expected( rv_key_values, :curation_notes_user, exp_curation_notes_user )
-
1
validate_expected( rv_key_values, :date_coverage, exp_date_coverage )
-
1
validate_expected( rv_key_values, :date_created, exp_date_created )
-
1
validate_expected( rv_key_values, :date_modified, exp_date_modified )
-
1
validate_expected( rv_key_values, :date_published, exp_date_published )
-
1
validate_expected( rv_key_values, :date_updated, exp_date_updated )
-
1
validate_expected( rv_key_values, :depositor, exp_depositor )
-
1
validate_expected( rv_key_values, :description, exp_description )
-
1
validate_expected( rv_key_values, :doi, exp_doi )
-
1
validate_expected( rv_key_values, :fundedby, exp_fundedby )
-
1
validate_expected( rv_key_values, :fundedby_other, exp_fundedby_other )
-
1
validate_expected( rv_key_values, :grantnumber, exp_grantnumber )
-
1
validate_expected( rv_key_values, :referenced_by, exp_referenced_by )
-
1
validate_expected( rv_key_values, :keyword, exp_keyword )
-
1
validate_expected( rv_key_values, :language, exp_language )
-
1
validate_expected( rv_key_values, :location, exp_location )
-
1
validate_expected( rv_key_values, :message, exp_message )
-
1
validate_expected( rv_key_values, :methodology, exp_methodology )
-
1
validate_expected( rv_key_values, :prior_identifier, exp_prior_identifier )
-
1
validate_expected( rv_key_values, :rights_license, exp_rights_license )
-
1
validate_expected( rv_key_values, :rights_license_other, exp_rights_license_other )
-
1
validate_expected( rv_key_values, :subject_discipline, exp_subject_discipline )
-
1
validate_expected( rv_key_values, :title, exp_title )
-
1
validate_expected( rv_key_values, :tombstone, exp_tombstone )
-
1
validate_expected( rv_key_values, :total_file_count, exp_total_file_count )
-
1
validate_expected( rv_key_values, :total_file_size, exp_total_file_size )
-
1
validate_expected( rv_key_values, :total_file_size_human_readable, exp_total_file_size_human_readable )
-
1
validate_expected( rv_key_values, :visibility, exp_visibility )
-
1
added_prov_key_values.each_pair do |key, value|
-
1
validate_expected(rv_key_values, key, value )
-
end
-
1
expect( rv_key_values.size ).to eq size
-
end
-
-
end
-
# Generated via
-
# `rails generate hyrax:work Dissertation`
-
1
require 'rails_helper'
-
-
1
RSpec.describe Dissertation do
-
1
it "has tests" do
-
1
skip "Add your tests here"
-
end
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
-
1
RSpec.describe FileSet do
-
-
# include Hyrax::FactoryHelpers
-
-
# before(:all) do
-
# #puts "DataSet ids before=#{DataSet.all.map { |ds| ds.id }}"
-
# puts "FileSet ids before=#{FileSet.all.map { |fs| fs.id }}"
-
# end
-
#
-
# after(:all) do
-
# puts "FileSet ids after=#{FileSet.all.map { |fs| fs.id }}"
-
# #puts "DataSet ids after=#{DataSet.all.map { |ds| ds.id }}"
-
# # clean up created DataSet
-
# #DataSet.all.each { |ds| ds.delete }
-
# FileSet.all.each { |fs| fs.delete }
-
# end
-
-
5
let( :id ) { '0123458678' }
-
1
let( :visibility_private ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PRIVATE }
-
1
let( :visibility_public ) { Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC }
-
1
let( :metadata_keys_all ) {
-
1
%i[
-
curation_notes_admin
-
curation_notes_user
-
date_created
-
date_modified
-
date_uploaded
-
file_extension
-
files_count
-
file_size
-
file_size_human_readable
-
label
-
location
-
mime_type
-
original_checksum
-
original_name
-
parent_id
-
prior_identifier
-
title
-
uri
-
version_count
-
virus_scan_service
-
virus_scan_status
-
virus_scan_status_date
-
visibility
-
]
-
}
-
1
let( :metadata_keys_brief ) {
-
1
%i[
-
title
-
label
-
parent_id
-
file_extension
-
visibility
-
]
-
}
-
1
let( :metadata_keys_update ) {
-
1
%i[
-
title
-
label
-
parent_id
-
file_extension
-
version_count
-
visibility
-
]
-
}
-
-
-
1
let(:user) { create(:user) }
-
-
1
describe 'provenance constants' do
-
-
1
it 'has all metadata elements defined' do
-
1
expect( subject.attributes_all_for_provenance ).to eq metadata_keys_all
-
end
-
-
1
it 'has brief metadata elements defined' do
-
1
expect( subject.attributes_brief_for_provenance ).to eq metadata_keys_brief
-
end
-
-
1
it 'has update metadata elements defined' do
-
1
expect( subject.attributes_update_for_provenance ).to eq metadata_keys_update
-
end
-
-
end
-
-
1
describe 'rdf type' do
-
2
subject { described_class.new.type }
-
-
2
it { is_expected.to include(Hydra::PCDM::Vocab::PCDMTerms.Object, Hydra::Works::Vocab::WorksTerms.FileSet) }
-
end
-
-
1
it 'is a Hydra::Works::FileSet' do
-
1
expect(subject).to be_file_set
-
end
-
-
1
it 'has depositor' do
-
1
subject.depositor = 'tess@example.com'
-
end
-
-
1
it 'updates attributes' do
-
1
subject.attributes = { title: ['My new Title'] }
-
1
expect(subject.title).to eq(['My new Title'])
-
end
-
-
1
context 'when it is initialized' do
-
1
it 'has empty arrays for the properties' do
-
1
expect(subject.title).to eq []
-
end
-
end
-
-
1
describe 'visibility' do
-
1
it "does not be changed when it's new" do
-
1
expect(subject).not_to be_visibility_changed
-
end
-
1
it 'is changed when it has been changed' do
-
1
subject.visibility = 'open'
-
1
expect(subject).to be_visibility_changed
-
end
-
-
1
it "does not be changed when it's set to its previous value" do
-
1
subject.visibility = 'restricted'
-
1
expect(subject).not_to be_visibility_changed
-
end
-
end
-
-
1
describe '#apply_depositor_metadata' do
-
2
before { subject.apply_depositor_metadata('jcoyne') }
-
-
1
it 'grants edit access and record the depositor' do
-
1
expect(subject.edit_users).to eq ['jcoyne']
-
1
expect(subject.depositor).to eq 'jcoyne'
-
end
-
end
-
-
1
describe 'attributes' do
-
1
it 'has a set of permissions' do
-
1
subject.read_groups = %w[group1 group2]
-
1
subject.edit_users = ['user1']
-
1
subject.read_users = %w[user2 user3]
-
1
expect(subject.permissions.map(&:to_hash)).to match_array [
-
{ type: 'group', access: 'read', name: 'group1' },
-
{ type: 'group', access: 'read', name: 'group2' },
-
{ type: 'person', access: 'read', name: 'user2' },
-
{ type: 'person', access: 'read', name: 'user3' },
-
{ type: 'person', access: 'edit', name: 'user1' }
-
]
-
end
-
-
1
it "has attached content" do
-
1
expect(subject.association(:original_file)).to be_kind_of ActiveFedora::Associations::DirectlyContainsOneAssociation
-
end
-
end
-
-
1
describe 'metadata' do
-
1
it 'has descriptive metadata' do
-
1
expect(subject).to respond_to(:relative_path)
-
1
expect(subject).to respond_to(:depositor)
-
1
expect(subject).to respond_to(:related_url)
-
1
expect(subject).to respond_to(:based_near)
-
1
expect(subject).to respond_to(:contributor)
-
1
expect(subject).to respond_to(:creator)
-
1
expect(subject).to respond_to(:title)
-
1
expect(subject).to respond_to(:description)
-
1
expect(subject).to respond_to(:publisher)
-
1
expect(subject).to respond_to(:date_created)
-
1
expect(subject).to respond_to(:date_uploaded)
-
1
expect(subject).to respond_to(:date_modified)
-
1
expect(subject).to respond_to(:subject)
-
1
expect(subject).to respond_to(:language)
-
1
expect(subject).to respond_to(:license)
-
1
expect(subject).to respond_to(:resource_type)
-
1
expect(subject).to respond_to(:identifier)
-
end
-
1
it 'has properties from characterization metadata' do
-
1
expect(subject).to respond_to(:format_label)
-
1
expect(subject).to respond_to(:mime_type)
-
1
expect(subject).to respond_to(:file_size)
-
1
expect(subject).to respond_to(:last_modified)
-
1
expect(subject).to respond_to(:filename)
-
1
expect(subject).to respond_to(:original_checksum)
-
1
expect(subject).to respond_to(:well_formed)
-
1
expect(subject).to respond_to(:page_count)
-
1
expect(subject).to respond_to(:file_title)
-
1
expect(subject).to respond_to(:duration)
-
1
expect(subject).to respond_to(:sample_rate)
-
# :creator is characterization metadata?
-
1
expect(subject).to respond_to(:creator)
-
end
-
-
1
describe 'metadata overrides' do
-
1
before do
-
4
subject.id = id
-
4
subject.label = "some_file.pdf"
-
subject.visibility = visibility_public
-
end
-
-
1
it 'provides file_set_ids' do
-
key = :label
-
exp_value = "some_file.pdf"
-
key_values = { test: 'testing' }
-
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq true
-
expect( key_values[key] ).to eq exp_value
-
expect( key_values[:test] ).to eq 'testing'
-
expect( key_values.size ).to eq 2
-
end
-
-
1
it 'provides file_size' do
-
key = :file_size
-
exp_value = 0
-
key_values = { test: 'testing' }
-
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq true
-
expect( key_values[key] ).to eq exp_value
-
expect( key_values[:test] ).to eq 'testing'
-
expect( key_values.size ).to eq 2
-
end
-
-
1
it 'provides file_size_human_readable' do
-
key = :file_size_human_readable
-
exp_value = '0 Bytes'
-
key_values = { test: 'testing' }
-
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq true
-
expect( key_values[key] ).to eq exp_value
-
expect( key_values[:test] ).to eq 'testing'
-
expect( key_values.size ).to eq 2
-
end
-
-
1
it 'does not provide some arbritrary metadata' do
-
key = :some_arbritrary_metadata
-
key_values = { test: 'testing' }
-
expect( subject.metadata_hash_override( key: key, ignore_blank_values: false, key_values: key_values ) ).to eq false
-
expect( key_values[:test] ).to eq 'testing'
-
expect( key_values.size ).to eq 1
-
end
-
-
end
-
-
1
it 'redefines to_param to make redis keys more recognizable' do
-
1
expect(subject.to_param).to eq subject.id
-
end
-
-
# describe 'that have been saved' do
-
# before { subject.apply_depositor_metadata('jcoyne') }
-
#
-
# it 'is able to set values via delegated methods' do
-
# subject.related_url = ['http://example.org/']
-
# subject.creator = ['John Doe']
-
# subject.title = ['New work']
-
# subject.save
-
# f = subject.reload
-
# expect(f.related_url).to eq ['http://example.org/']
-
# expect(f.creator).to eq ['John Doe']
-
# expect(f.title).to eq ['New work']
-
# end
-
#
-
# it 'is able to be added to w/o unexpected graph behavior' do
-
# subject.creator = ['John Doe']
-
# subject.title = ['New work']
-
# subject.save!
-
# f = subject.reload
-
# expect(f.creator).to eq ['John Doe']
-
# expect(f.title).to eq ['New work']
-
# f.creator = ['Jane Doe']
-
# f.title += ['Newer work']
-
# f.save
-
# f = subject.reload
-
# expect(f.creator).to eq ['Jane Doe']
-
# # TODO: Is order important?
-
# expect(f.title).to include('New work')
-
# expect(f.title).to include('Newer work')
-
# end
-
# end
-
end
-
-
# describe '#indexer' do
-
# subject { described_class.indexer }
-
#
-
# it { is_expected.to eq Hyrax::FileSetIndexer }
-
#
-
# describe "setting" do
-
# before do
-
# class AltFile < ActiveFedora::Base
-
# include Hyrax::FileSetBehavior
-
# end
-
# end
-
# after do
-
# Object.send(:remove_const, :AltFile)
-
# end
-
# let(:klass) { Class.new }
-
#
-
# subject { AltFile.new }
-
#
-
# it 'is settable' do
-
# AltFile.indexer = klass
-
# expect(AltFile.indexer).to eq klass
-
# end
-
# end
-
# end
-
#
-
# it 'supports multi-valued fields in solr' do
-
# subject.keyword = %w[keyword1 keyword2]
-
# expect { subject.save }.not_to raise_error
-
# subject.delete
-
# end
-
#
-
# it 'supports setting and getting the relative_path value' do
-
# subject.relative_path = 'documents/research/NSF/2010'
-
# expect(subject.relative_path).to eq 'documents/research/NSF/2010'
-
# end
-
# describe 'create_thumbnail' do
-
# let(:file_set) do
-
# described_class.new do |f|
-
# f.apply_depositor_metadata('mjg36')
-
# end
-
# end
-
#
-
# describe 'with a video', if: Hyrax.config.enable_ffmpeg do
-
# before do
-
# allow(file_set).to receive(mime_type: 'video/quicktime') # Would get set by the characterization job
-
# file_set.save
-
# Hydra::Works::AddFileToFileSet.call(subject, File.open("#{fixture_path}/countdown.avi", 'rb'), :original_file)
-
# end
-
# it 'makes a png thumbnail' do
-
# file_set.create_thumbnail
-
# expect(file_set.thumbnail.content.size).to eq 4768 # this is a bad test. I just want to show that it did something.
-
# expect(file_set.thumbnail.mime_type).to eq 'image/png'
-
# end
-
# end
-
# end
-
#
-
# describe '#related_files' do
-
# let!(:f1) { described_class.new }
-
#
-
# context 'when there are no related files' do
-
# it 'returns an empty array' do
-
# expect(f1.related_files).to eq []
-
# end
-
# end
-
#
-
# context 'when there are related files' do
-
# let(:parent_work) { create(:data_set_with_files) }
-
# let(:f1) { parent_work.file_sets.first }
-
# let(:f2) { parent_work.file_sets.last }
-
#
-
# subject { f1.reload.related_files }
-
#
-
# it 'returns all files contained in parent work(s) but excludes itself' do
-
# expect(subject).to include(f2)
-
# expect(subject).not_to include(f1)
-
# end
-
# end
-
# end
-
#
-
# describe 'noid integration', :clean_repo do
-
# let(:service) { instance_double(::Noid::Rails::Service, mint: noid) }
-
# let(:noid) { 'wd3763094' }
-
# let!(:default) { Hyrax.config.enable_noids? }
-
#
-
# before do
-
# allow(::Noid::Rails::Service).to receive(:new).and_return(service)
-
# end
-
#
-
# after { Hyrax.config.enable_noids = default }
-
#
-
# context 'with noids enabled' do
-
# before { Hyrax.config.enable_noids = true }
-
#
-
# it 'uses the noid service' do
-
# expect(service).to receive(:mint).once
-
# subject.assign_id
-
# end
-
#
-
# context "after saving" do
-
# before { subject.save! }
-
#
-
# it 'returns the expected identifier' do
-
# expect(subject.id).to eq noid
-
# end
-
#
-
# it "has a treeified URL" do
-
# expect(subject.uri.to_s).to end_with '/wd/37/63/09/wd3763094'
-
# end
-
# end
-
#
-
# context 'when a url is provided' do
-
# let(:url) { "#{ActiveFedora.fedora.host}/test/wd/37/63/09/wd3763094" }
-
#
-
# it 'transforms the url into an id' do
-
# expect(described_class.uri_to_id(url)).to eq 'wd3763094'
-
# end
-
# end
-
# end
-
#
-
# context 'with noids disabled' do
-
# before { Hyrax.config.enable_noids = false }
-
#
-
# it 'does not use the noid service' do
-
# expect(service).not_to receive(:mint)
-
# subject.assign_id
-
# end
-
# end
-
# end
-
#
-
# context 'with access control metadata' do
-
# subject do
-
# described_class.new do |m|
-
# m.apply_depositor_metadata('jcoyne')
-
# m.permissions_attributes = [{ type: 'person', access: 'read', name: 'person1' },
-
# { type: 'person', access: 'read', name: 'person2' },
-
# { type: 'group', access: 'read', name: 'group-6' },
-
# { type: 'group', access: 'read', name: 'group-7' },
-
# { type: 'group', access: 'edit', name: 'group-8' }]
-
# end
-
# end
-
#
-
# it 'has read groups accessor' do
-
# expect(subject.read_groups).to eq ['group-6', 'group-7']
-
# end
-
#
-
# it 'has read groups writer' do
-
# subject.read_groups = ['group-2', 'group-3']
-
# expect(subject.read_groups).to eq ['group-2', 'group-3']
-
# end
-
# end
-
#
-
# describe 'permissions validation' do
-
# before { subject.apply_depositor_metadata('mjg36') }
-
#
-
# describe 'overriding' do
-
# let(:asset) { SampleKlass.new }
-
#
-
# before do
-
# class SampleKlass < FileSet
-
# def paranoid_edit_permissions
-
# []
-
# end
-
# end
-
# asset.apply_depositor_metadata('mjg36')
-
# end
-
# after do
-
# Object.send(:remove_const, :SampleKlass)
-
# end
-
# context 'when the public has edit access' do
-
# before { subject.edit_groups = ['public'] }
-
#
-
# it 'is invalid' do
-
# expect(subject).not_to be_valid
-
# expect(subject.errors[:edit_groups]).to include('Public cannot have edit access')
-
# end
-
# end
-
# end
-
#
-
# describe '#paranoid_edit_permissions=' do
-
# before do
-
# subject.paranoid_edit_permissions =
-
# [
-
# { key: :edit_users, message: 'Depositor must have edit access', condition: ->(obj) { !obj.edit_users.include?(obj.depositor) } }
-
# ]
-
# subject.permissions = [Hydra::AccessControls::Permission.new(type: 'person', name: 'mjg36', access: 'read')]
-
# end
-
# it 'uses the user supplied configuration for validation' do
-
# expect(subject).not_to be_valid
-
# expect(subject.errors[:edit_users]).to include('Depositor must have edit access')
-
# end
-
# end
-
#
-
# context 'when the public has edit access' do
-
# before { subject.edit_groups = ['public'] }
-
#
-
# it 'is invalid' do
-
# expect(subject).not_to be_valid
-
# expect(subject.errors[:edit_groups]).to include('Public cannot have edit access')
-
# end
-
# end
-
#
-
# context 'when registered has edit access' do
-
# before { subject.edit_groups = ['registered'] }
-
#
-
# it 'is invalid' do
-
# expect(subject).not_to be_valid
-
# expect(subject.errors[:edit_groups]).to include('Registered cannot have edit access')
-
# end
-
# end
-
#
-
# context 'everything is copacetic' do
-
# it 'is valid' do
-
# expect(subject).to be_valid
-
# end
-
# end
-
# end
-
#
-
# describe 'file content validation' do
-
# subject { create(:file_set) }
-
#
-
# let(:file_path) { fixture_path + '/small_file.txt' }
-
#
-
# context 'when file contains a virus' do
-
# before do
-
# allow(subject).to receive(:warn) # suppress virus warnings
-
# allow(Hydra::Works::VirusCheckerService).to receive(:file_has_virus?) { true }
-
# # TODO: Test that this works with Hydra::Works::UploadFileToFileSet. see https://github.com/samvera/hydra-works/pull/139
-
# # Hydra::Works::UploadFileToFileSet.call(subject, file_path, original_name: 'small_file.txt')
-
# of = subject.build_original_file
-
# of.content = File.open(file_path)
-
# end
-
#
-
# it 'populates the errors hash during validation' do
-
# expect(subject).not_to be_valid
-
# expect(subject.errors.messages[:base].first).to eq "Failed to verify uploaded file is not a virus"
-
# end
-
#
-
# it 'does not save the file or create a new version' do
-
# original_version_count = subject.versions.count
-
# subject.save
-
# expect(subject.versions.count).to eq original_version_count
-
# expect(subject.reload.original_file).to be_nil
-
# end
-
# end
-
# end
-
#
-
# describe '#where_digest_is', :clean_repo do
-
# let(:file) { create(:file_set) }
-
# let(:file_path) { fixture_path + '/small_file.txt' }
-
# let(:digest_string) { '88fb4e88c15682c18e8b19b8a7b6eaf8770d33cf' }
-
#
-
# before do
-
# allow(file).to receive(:warn) # suppress virus warnings
-
# of = file.build_original_file
-
# of.content = File.open(file_path)
-
# file.save
-
# file.update_index
-
# end
-
# subject { described_class.where_digest_is(digest_string).first }
-
#
-
# it { is_expected.to eq(file) }
-
# end
-
#
-
# describe 'to_solr' do
-
# let(:indexer) { double(generate_solr_document: {}) }
-
#
-
# before do
-
# allow(Hyrax::FileSetIndexer).to receive(:new)
-
# .with(subject).and_return(indexer)
-
# end
-
#
-
# it 'calls the indexer' do
-
# expect(indexer).to receive(:generate_solr_document)
-
# subject.to_solr
-
# end
-
#
-
# it 'has human readable type' do
-
# expect(subject.to_solr.fetch('human_readable_type_tesim')).to eq 'File'
-
# end
-
# end
-
#
-
# context 'with versions' do
-
# it 'has versions' do
-
# expect(subject.versions.count).to eq 0
-
# end
-
# end
-
#
-
# describe 'public?' do
-
# context 'when read group is set to public' do
-
# before { subject.read_groups = ['public'] }
-
#
-
# it { is_expected.to be_public }
-
# end
-
#
-
# context 'when read group is not set to public' do
-
# before { subject.read_groups = ['foo'] }
-
# it { is_expected.not_to be_public }
-
# end
-
# end
-
#
-
# describe 'work associations' do
-
# let(:work) { create(:data_set_with_one_file) }
-
#
-
# subject { work.file_sets.first.reload }
-
#
-
# it 'belongs to works' do
-
# expect(subject.parents).to eq [work]
-
# end
-
# end
-
#
-
# describe '#to_s' do
-
# it 'uses the provided titles' do
-
# # The title property would return the terms in random order, so stub the behavior:
-
# subject.title = %w[Hello World]
-
# expect(subject.to_s).to include 'Hello'
-
# expect(subject.to_s).to include 'World'
-
# end
-
#
-
# it 'falls back on label if no titles are given' do
-
# subject.title = []
-
# subject.label = 'Spam'
-
# expect(subject.to_s).to eq('Spam')
-
# end
-
#
-
# it 'with no label or titles it is "No Title"' do
-
# subject.title = []
-
# subject.label = nil
-
# expect(subject.to_s).to eq('No Title')
-
# end
-
# end
-
#
-
# describe 'to_solr record' do
-
# subject do
-
# described_class.new.tap do |f|
-
# f.apply_depositor_metadata(depositor)
-
# f.save
-
# end
-
# end
-
#
-
# let(:depositor) { 'jcoyne' }
-
# let(:depositor_key) { Solrizer.solr_name('depositor') }
-
# let(:title_key) { Solrizer.solr_name('title', :stored_searchable, type: :string) }
-
# let(:title) { ['abc123'] }
-
# let(:no_terms) { described_class.find(subject.id).to_solr }
-
# let(:terms) do
-
# file = described_class.find(subject.id)
-
# file.title = title
-
# file.save
-
# file.to_solr
-
# end
-
#
-
# context 'without terms' do
-
# specify 'title is nil' do
-
# expect(no_terms[title_key]).to be_nil
-
# end
-
# end
-
#
-
# context 'with terms' do
-
# specify 'depositor is set' do
-
# expect(terms[depositor_key].first).to eql(depositor)
-
# end
-
# specify 'title is set' do
-
# expect(terms[title_key]).to eql(title)
-
# end
-
# end
-
# end
-
#
-
# describe 'with a parent work' do
-
# let(:parent) { create(:data_set_with_one_file) }
-
# let(:parent_id) { parent.id }
-
#
-
# describe '#related_files' do
-
# let(:parent) { create(:data_set_with_files) }
-
# let(:sibling) { parent.file_sets.last }
-
#
-
# subject { parent.file_sets.first.reload }
-
#
-
# it 'returns related files, but not itself' do
-
# expect(subject.related_files).to eq([sibling])
-
# expect(sibling.reload.related_files).to eq([subject])
-
# end
-
# end
-
#
-
# describe '#remove_representative_relationship' do
-
# subject { parent.file_sets.first.reload }
-
#
-
# context 'it is not the representative' do
-
# let(:some_other_id) { create(:file_set).id }
-
#
-
# before do
-
# parent.representative_id = some_other_id
-
# parent.save!
-
# end
-
#
-
# it "doesn't update parent work when file is deleted" do
-
# subject.destroy
-
# expect(parent.representative_id).to eq some_other_id
-
# end
-
# end
-
#
-
# context 'it is the representative' do
-
# before do
-
# parent.representative_id = subject.id
-
# parent.save!
-
# end
-
#
-
# it 'updates the parent work when the file is deleted' do
-
# subject.destroy
-
# expect(parent.reload.representative_id).to be_nil
-
# end
-
# end
-
# end
-
# end
-
#
-
# describe 'mime type recognition' do
-
# let(:mock_file) { mock_file_factory(mime_type: mime_type) }
-
#
-
# before { allow(subject).to receive(:original_file).and_return(mock_file) }
-
#
-
# context '#image?' do
-
# context 'when image/jp2' do
-
# let(:mime_type) { 'image/jp2' }
-
#
-
# it { is_expected.to be_image }
-
# end
-
# context 'when image/jpg' do
-
# let(:mime_type) { 'image/jpg' }
-
#
-
# it { is_expected.to be_image }
-
# end
-
# context 'when image/png' do
-
# let(:mime_type) { 'image/png' }
-
#
-
# it { is_expected.to be_image }
-
# end
-
# context 'when image/tiff' do
-
# let(:mime_type) { 'image/tiff' }
-
#
-
# it { is_expected.to be_image }
-
# end
-
# end
-
#
-
# describe '#pdf?' do
-
# let(:mime_type) { 'application/pdf' }
-
#
-
# it { is_expected.to be_pdf }
-
# end
-
#
-
# describe '#audio?' do
-
# context 'when x-wave' do
-
# let(:mime_type) { 'audio/x-wave' }
-
#
-
# it { is_expected.to be_audio }
-
# end
-
# context 'when x-wav' do
-
# let(:mime_type) { 'audio/x-wav' }
-
#
-
# it { is_expected.to be_audio }
-
# end
-
# context 'when mpeg' do
-
# let(:mime_type) { 'audio/mpeg' }
-
#
-
# it { is_expected.to be_audio }
-
# end
-
# context 'when mp3' do
-
# let(:mime_type) { 'audio/mp3' }
-
#
-
# it { is_expected.to be_audio }
-
# end
-
# context 'when ogg' do
-
# let(:mime_type) { 'audio/ogg' }
-
#
-
# it { is_expected.to be_audio }
-
# end
-
# end
-
#
-
# describe '#video?' do
-
# context 'should be true for avi' do
-
# let(:mime_type) { 'video/avi' }
-
#
-
# it { is_expected.to be_video }
-
# end
-
#
-
# context 'should be true for webm' do
-
# let(:mime_type) { 'video/webm' }
-
#
-
# it { is_expected.to be_video }
-
# end
-
# context 'should be true for mp4' do
-
# let(:mime_type) { 'video/mp4' }
-
#
-
# it { is_expected.to be_video }
-
# end
-
# context 'should be true for mpeg' do
-
# let(:mime_type) { 'video/mpeg' }
-
#
-
# it { is_expected.to be_video }
-
# end
-
# context 'should be true for quicktime' do
-
# let(:mime_type) { 'video/quicktime' }
-
#
-
# it { is_expected.to be_video }
-
# end
-
# context 'should be true for mxf' do
-
# let(:mime_type) { 'application/mxf' }
-
#
-
# it { is_expected.to be_video }
-
# end
-
# end
-
# end
-
#
-
# describe "#to_global_id", clean_repo: true do
-
# let(:file_set) { described_class.new(id: '123') }
-
#
-
# subject { file_set.to_global_id }
-
#
-
# it { is_expected.to be_kind_of GlobalID }
-
# end
-
#
-
-
end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
require 'rails_helper'
-
-
1
RSpec.describe GenericWork do
-
1
it "has tests" do
-
1
skip "Add your tests here"
-
end
-
end
-
1
require 'rails_helper'
-
-
1
RSpec.describe Qa::LocalAuthorityEntry, type: :model do
-
1
pending "add some examples to (or delete) #{__FILE__}"
-
end
-
1
require 'rails_helper'
-
-
1
RSpec.describe Qa::LocalAuthority, type: :model do
-
1
pending "add some examples to (or delete) #{__FILE__}"
-
end
-
1
require 'rails_helper'
-
-
1
describe SearchBuilder do # rubocop:disable RSpec/EmptyExampleGroup
-
-
1
subject(:search_builder) { described_class.new scope }
-
1
let(:user_params) { {} }
-
1
let(:blacklight_config) { Blacklight::Configuration.new }
-
1
let(:scope) { double blacklight_config: blacklight_config }
-
-
1
describe "my custom step" do # rubocop:disable RSpec/EmptyExampleGroup
-
# subject(:query_parameters) do
-
# search_builder.with(user_params).processed_parameters
-
# end
-
#
-
# it "adds my custom data" do
-
# expect(query_parameters).to include :custom_data
-
# end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
RSpec.describe User, type: :model do
-
-
4
let(:user) { build(:user) }
-
1
let(:another_user) { build(:user) }
-
-
1
describe 'verifying factories' do
-
1
describe ':user' do
-
2
let(:user) { build(:user) }
-
-
1
it 'will, by default, have only registered group' do
-
1
expect(user.groups).to eq([])
-
1
user.save!
-
# Ensuring that we can refind it and have the correct groups
-
1
expect(user.class.find(user.id).groups).to eq(['registered'])
-
end
-
end
-
1
describe ':admin' do
-
3
let(:admin_user) { create(:admin) }
-
-
1
it 'will be an "admin"' do
-
1
expect(admin_user.admin?).to be true
-
end
-
1
context 'when found from the database' do
-
1
it 'will be an "admin"' do
-
1
refound_admin_user = described_class.find(admin_user.id)
-
1
expect(refound_admin_user.admin?).to be true
-
end
-
end
-
end
-
end
-
-
1
it "has an email" do
-
1
expect(user.user_key).to be_kind_of String
-
end
-
1
it "has activity stream-related methods defined" do
-
1
expect(user).to respond_to(:stream)
-
1
expect(user).to respond_to(:events)
-
1
expect(user).to respond_to(:profile_events)
-
1
expect(user).to respond_to(:log_event)
-
1
expect(user).to respond_to(:log_profile_event)
-
end
-
1
it "has social attributes" do
-
1
expect(user).to respond_to(:twitter_handle)
-
1
expect(user).to respond_to(:facebook_handle)
-
1
expect(user).to respond_to(:googleplus_handle)
-
1
expect(user).to respond_to(:linkedin_handle)
-
1
expect(user).to respond_to(:orcid)
-
end
-
-
# describe 'Arkivo and Zotero integration' do
-
# it 'sets an Arkivo token after_initialize if API is enabled' do
-
# expect(described_class.new).to respond_to(:arkivo_token)
-
# end
-
#
-
# describe 'Arkivo token generation' do
-
# before do
-
# allow(SecureRandom).to receive(:base64).with(24).and_return(token1, token1, token2)
-
# end
-
#
-
# let(:token1) { 'token1' }
-
# let(:token2) { 'token2' }
-
#
-
# it 'generates a new token if a user is found with the existing token' do
-
# user1 = described_class.create(email: 'foo@example.org', password: 'foobarbaz')
-
# expect(user1.arkivo_token).to eq token1
-
# user2 = described_class.create(email: 'bar@example.org', password: 'bazquuxquuux')
-
# expect(user2.arkivo_token).to eq token2
-
# end
-
# end
-
#
-
# describe 'Zotero tokens' do
-
# let(:token) { 'something' }
-
#
-
# it 'has a custom getter/setter for Zotero request tokens' do
-
# user.zotero_token = token
-
# expect(user.read_attribute(:zotero_token)).to eq Marshal.dump(token)
-
# expect(user.zotero_token).to eq token
-
# end
-
# end
-
# end
-
#
-
# describe 'ORCID validation and normalization' do
-
# it 'saves when a valid bare ORCID is supplied' do
-
# user.orcid = '0000-0000-1111-2222'
-
# expect(user).to be_valid
-
# expect(user.save).to be true
-
# end
-
# it 'saves when an ORCID with a non-numeric check digit is provided' do
-
# user.orcid = 'http://orcid.org/0000-0000-1111-222X'
-
# expect(user).to be_valid
-
# expect(user.save).to be true
-
# end
-
# it 'saves when a valid ORCID HTTP URI w/ trailing slash is supplied' do
-
# user.orcid = 'http://orcid.org/0000-0000-1111-2222/'
-
# expect(user).to be_valid
-
# expect(user.save).to be true
-
# end
-
# it 'saves when a valid ORCID HTTPS URI is supplied' do
-
# user.orcid = 'https://orcid.org/0000-0000-1111-2222'
-
# expect(user).to be_valid
-
# expect(user.save).to be true
-
# end
-
# it 'normalizes bare ORCIDs to HTTPS URIs' do
-
# user.orcid = '0000-0000-1111-2222'
-
# user.save
-
# expect(user.orcid).to eq 'https://orcid.org/0000-0000-1111-2222'
-
# end
-
# it 'normalizes HTTP ORCIDs to HTTPS URIs' do
-
# user.orcid = 'http://orcid.org/0000-0000-1111-2222'
-
# user.save
-
# expect(user.orcid).to eq 'https://orcid.org/0000-0000-1111-2222'
-
# end
-
# it 'marks short ORCIDs as invalid' do
-
# user.orcid = '000-000-111-222'
-
# expect(user).not_to be_valid
-
# expect(user.save).to be false
-
# end
-
# it 'marks long ORCIDs as invalid' do
-
# user.orcid = '0000-0000-1111-222222'
-
# expect(user).not_to be_valid
-
# expect(user.save).to be false
-
# end
-
# end
-
#
-
# describe "#to_param" do
-
# let(:user) { described_class.new(email: 'jilluser@example.com') }
-
#
-
# it "overrides to_param to make keys more recognizable in redis (and useable within Rails URLs)" do
-
# expect(user.to_param).to eq("jilluser@example-dot-com")
-
# end
-
# end
-
#
-
# subject { user }
-
#
-
# it { is_expected.to delegate_method(:can?).to(:ability) }
-
# it { is_expected.to delegate_method(:cannot?).to(:ability) }
-
#
-
# describe '#to_sipity_agent' do
-
# subject { user.to_sipity_agent }
-
#
-
# it 'will find or create a Sipity::Agent' do
-
# user.save!
-
# expect { subject }.to change { Sipity::Agent.count }.by(1)
-
# end
-
#
-
# it 'will fail if the User is not persisted' do
-
# expect { subject }.to raise_error(ActiveRecord::StatementInvalid)
-
# end
-
#
-
# context "when another process makes the agent" do
-
# let(:user) { create(:user) }
-
#
-
# before do
-
# user.sipity_agent # load up and cache the association
-
# User.find(user.id).create_sipity_agent!
-
# end
-
# it "returns the existing agent" do
-
# expect { subject }.not_to change { Sipity::Agent.count }
-
# end
-
# end
-
# end
-
#
-
# describe "activity streams" do
-
# let(:now) { Time.zone.now.to_i }
-
# let(:activities) do
-
# [{ action: 'so and so edited their profile', timestamp: now },
-
# { action: 'so and so uploaded a file', timestamp: (now - 360) }]
-
# end
-
# let(:file_activities) do
-
# [{ action: 'uploaded a file', timestamp: now + 1 }]
-
# end
-
#
-
# before do
-
# allow(user).to receive(:events).and_return(activities)
-
# allow(user).to receive(:profile_events).and_return(file_activities)
-
# end
-
#
-
# it "gathers the user's recent activity within the default amount of time" do
-
# expect(user.all_user_activity).to eq(file_activities.concat(activities))
-
# end
-
#
-
# it "gathers the user's recent activity within a given timestamp" do
-
# expect(user.all_user_activity(now - 60)).to eq(file_activities.concat([activities.first]))
-
# end
-
# end
-
# describe "proxy_deposit_rights" do
-
# subject { create :user }
-
#
-
# before do
-
# subject.can_receive_deposits_from << user
-
# subject.can_make_deposits_for << another_user
-
# subject.save!
-
# end
-
# it "can_receive_deposits_from" do
-
# expect(subject.can_receive_deposits_from.to_a).to eq [user]
-
# expect(user.can_make_deposits_for.to_a).to eq [subject]
-
# end
-
# it "can_make_deposits_for" do
-
# expect(subject.can_make_deposits_for.to_a).to eq [another_user]
-
# expect(another_user.can_receive_deposits_from.to_a).to eq [subject]
-
# end
-
# end
-
# describe "class methods" do
-
# describe "recent_users" do
-
# let(:new_users) { described_class.all.order(created_at: :desc) }
-
#
-
# before do
-
# (1..3).each { |i| described_class.create(email: "abc#{i}@blah.frg", password: "blarg1234", created_at: Time.zone.now - i.days) }
-
# end
-
#
-
# context "when has a start date" do
-
# subject { described_class.recent_users(Time.zone.today - 2.days) }
-
#
-
# it "returns valid data" do
-
# expect(subject.count).to eq 2
-
# is_expected.to include(new_users[0], new_users[1])
-
# is_expected.not_to include(new_users[2])
-
# end
-
# end
-
#
-
# context "when has start and end date" do
-
# subject { described_class.recent_users(Time.zone.today - 2.days, Time.zone.today - 1.day) }
-
#
-
# it "returns valid data" do
-
# expect(subject.count).to eq 1
-
# is_expected.to include(new_users[1])
-
# is_expected.not_to include(new_users[2], new_users[0])
-
# end
-
# end
-
# end
-
# end
-
# describe "scope Users" do
-
# let!(:basic_user) { create(:user) }
-
# let!(:guest_user) { create(:user, :guest) }
-
# let!(:audit_user) { User.audit_user }
-
# let!(:batch_user) { User.batch_user }
-
#
-
# context "without_system_accounts" do
-
# subject { described_class.without_system_accounts }
-
#
-
# it "omits audit_user and batch_user" do
-
# is_expected.to include(basic_user, guest_user)
-
# is_expected.not_to include(audit_user, batch_user)
-
# end
-
# end
-
# context "registered" do
-
# subject { described_class.registered }
-
#
-
# it "omits guest_user" do
-
# is_expected.to include(basic_user, audit_user, batch_user)
-
# is_expected.not_to include(guest_user)
-
# end
-
# end
-
# context "guests" do
-
# subject { described_class.guests }
-
#
-
# it "includes only guest_user" do
-
# is_expected.not_to include(basic_user, audit_user, batch_user)
-
# is_expected.to include(guest_user)
-
# end
-
# end
-
# end
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
-
1
RSpec.describe Hyrax::AdminStatsPresenter do
-
9
let(:stats_filters) { { start_date: start_date, end_date: end_date } }
-
3
let(:start_date) { "" }
-
4
let(:end_date) { "" }
-
9
let(:limit) { 10 }
-
9
let(:instance) { described_class.new(stats_filters, limit) }
-
-
1
describe "#valid_dates" do
-
1
context "without a start date" do
-
1
it "returns true" do
-
1
expect(instance.valid_dates).to be true
-
end
-
end
-
1
context "with a start date (only)" do
-
1
context "before/on today" do
-
2
let(:start_date) { Date.current.to_s }
-
1
it "returns true" do
-
1
expect(instance.valid_dates).to be true
-
end
-
end
-
1
context "after today" do
-
2
let(:start_date) { (Date.current + 1).to_s }
-
1
it "returns false" do
-
1
expect(instance.valid_dates).to be false
-
end
-
end
-
end
-
1
context "with an end date" do
-
4
let(:end_date) { "2011-11-11" }
-
1
context "without a start date" do
-
1
it "returns true" do
-
1
expect(instance.valid_dates).to be true
-
end
-
end
-
1
context "after the start date" do
-
2
let(:start_date) { "2010-10-10" }
-
1
it "returns true" do
-
1
expect(instance.valid_dates).to be true
-
end
-
end
-
1
context "preceding the start date" do
-
2
let(:start_date) { "2012-12-12" }
-
1
it "returns false" do
-
1
expect(instance.valid_dates).to be false
-
end
-
end
-
end
-
end
-
-
1
describe "#clear_invalid_dates!" do
-
1
context "with valid dates" do
-
2
let(:start_date) { "2010-10-10" }
-
2
let(:end_date) { "2011-11-11" }
-
1
it "does not clear filters" do
-
1
expect(stats_filters[:start_date]).to be_present
-
1
expect(stats_filters[:end_date]).to be_present
-
1
instance.clear_invalid_dates!
-
1
expect(stats_filters[:start_date]).to be_present
-
1
expect(stats_filters[:end_date]).to be_present
-
end
-
end
-
1
context "with invalid dates" do
-
2
let(:start_date) { "2012-12-12" }
-
2
let(:end_date) { "2011-11-11" }
-
1
it "clears the date values" do
-
1
expect(stats_filters[:start_date]).to be_present
-
1
expect(stats_filters[:end_date]).to be_present
-
1
instance.clear_invalid_dates!
-
1
expect(stats_filters[:start_date]).to be_nil
-
1
expect(stats_filters[:end_date]).to be_nil
-
end
-
end
-
end
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
-
1
RSpec.describe Hyrax::DataSetPresenter do
-
19
subject { described_class.new(double, double) }
-
5
let(:solr_document) { SolrDocument.new(attributes) }
-
5
let(:request) { double(host: 'example.org', base_url: 'http://example.org') }
-
5
let(:user_key) { 'a_user_key' }
-
-
1
let(:attributes) do
-
4
{ "id" => '888888',
-
"title_tesim" => ['foo', 'bar'],
-
"human_readable_type_tesim" => ["Generic Work"],
-
"has_model_ssim" => ["DataSet"],
-
"date_created_tesim" => ['an unformatted date'],
-
"depositor_tesim" => user_key }
-
end
-
5
let(:ability) { double Ability }
-
5
let(:presenter) { described_class.new(solr_document, ability, request) }
-
-
2
it { is_expected.to delegate_method(:to_s).to(:solr_document) }
-
# it { is_expected.to delegate_method(:human_readable_type).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:curation_notes_admin).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:curation_notes_user).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:date_created).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:date_modified).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:date_published).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:date_uploaded).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:fundedby).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:fundedby_other).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:rights_license).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:rights_license_other).to(:solr_document) }
-
-
2
it { is_expected.to delegate_method(:based_near_label).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:related_url).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:depositor).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:identifier).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:resource_type).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:keyword).to(:solr_document) }
-
2
it { is_expected.to delegate_method(:itemtype).to(:solr_document) }
-
-
1
describe "#relative_url_root" do
-
2
subject { presenter.relative_url_root }
-
2
it { is_expected.to eq '' } # this is true for test, in dev or prod it would be equal to '/data'
-
end
-
-
1
describe "#model_name" do
-
2
subject { presenter.model_name }
-
-
2
it { is_expected.to be_kind_of ActiveModel::Name }
-
end
-
-
1
describe '#manifest_url' do
-
2
subject { presenter.manifest_url }
-
-
2
it { is_expected.to eq 'http://example.org/concern/data_sets/888888/manifest' }
-
end
-
-
-
# describe '#iiif_viewer?' do
-
# let(:id_present) { false }
-
# let(:representative_presenter) { double('representative', present?: false) }
-
# let(:image_boolean) { false }
-
# let(:iiif_enabled) { false }
-
# let(:file_set_presenter) { Hyrax::FileSetPresenter.new(solr_document, ability) }
-
# let(:file_set_presenters) { [file_set_presenter] }
-
# let(:read_permission) { true }
-
#
-
# before do
-
# allow(presenter).to receive(:representative_id).and_return(id_present)
-
# allow(presenter).to receive(:representative_presenter).and_return(representative_presenter)
-
# allow(presenter).to receive(:file_set_presenters).and_return(file_set_presenters)
-
# allow(file_set_presenter).to receive(:image?).and_return(true)
-
# allow(ability).to receive(:can?).with(:read, solr_document.id).and_return(read_permission)
-
# allow(representative_presenter).to receive(:image?).and_return(image_boolean)
-
# allow(Hyrax.config).to receive(:iiif_image_server?).and_return(iiif_enabled)
-
# end
-
#
-
# subject { presenter.iiif_viewer? }
-
#
-
# context 'with no representative_id' do
-
# it { is_expected.to be false }
-
# end
-
#
-
# context 'with no representative_presenter' do
-
# let(:id_present) { true }
-
#
-
# it { is_expected.to be false }
-
# end
-
#
-
# context 'with non-image representative_presenter' do
-
# let(:id_present) { true }
-
# let(:representative_presenter) { double('representative', present?: true) }
-
# let(:image_boolean) { true }
-
#
-
# it { is_expected.to be false }
-
# end
-
#
-
# context 'with IIIF image server turned off' do
-
# let(:id_present) { true }
-
# let(:representative_presenter) { double('representative', present?: true) }
-
# let(:image_boolean) { true }
-
# let(:iiif_enabled) { false }
-
#
-
# it { is_expected.to be false }
-
# end
-
#
-
# context 'with representative image and IIIF turned on' do
-
# let(:id_present) { true }
-
# let(:representative_presenter) { double('representative', present?: true) }
-
# let(:image_boolean) { true }
-
# let(:iiif_enabled) { true }
-
#
-
# it { is_expected.to be true }
-
#
-
# context "when the user doesn't have permission to view the image" do
-
# let(:read_permission) { false }
-
#
-
# it { is_expected.to be false }
-
# end
-
# end
-
# end
-
#
-
# describe '#stats_path' do
-
# let(:user) { 'sarah' }
-
# let(:ability) { double "Ability" }
-
# let(:work) { build(:generic_work, id: '123abc') }
-
# let(:attributes) { work.to_solr }
-
#
-
# before do
-
# # https://github.com/samvera/active_fedora/issues/1251
-
# allow(work).to receive(:persisted?).and_return(true)
-
# end
-
#
-
# it { expect(presenter.stats_path).to eq Hyrax::Engine.routes.url_helpers.stats_work_path(id: work, locale: 'en') }
-
# end
-
#
-
# describe '#itemtype' do
-
# let(:work) { build(:generic_work, resource_type: type) }
-
# let(:attributes) { work.to_solr }
-
# let(:ability) { double "Ability" }
-
#
-
# subject { presenter.itemtype }
-
#
-
# context 'when resource_type is Audio' do
-
# let(:type) { ['Audio'] }
-
#
-
# it do
-
# is_expected.to eq 'http://schema.org/AudioObject'
-
# end
-
# end
-
#
-
# context 'when resource_type is Conference Proceeding' do
-
# let(:type) { ['Conference Proceeding'] }
-
#
-
# it { is_expected.to eq 'http://schema.org/ScholarlyArticle' }
-
# end
-
# end
-
#
-
# describe 'admin users' do
-
# let(:user) { create(:user) }
-
# let(:ability) { Ability.new(user) }
-
# let(:attributes) do
-
# {
-
# "read_access_group_ssim" => ["public"],
-
# 'id' => '99999'
-
# }
-
# end
-
#
-
# before { allow(user).to receive_messages(groups: ['admin', 'registered']) }
-
#
-
# context 'with a new public work' do
-
# it 'can feature the work' do
-
# allow(user).to receive(:can?).with(:create, FeaturedWork).and_return(true)
-
# expect(presenter.work_featurable?).to be true
-
# expect(presenter.display_feature_link?).to be true
-
# expect(presenter.display_unfeature_link?).to be false
-
# end
-
# end
-
#
-
# context 'with a featured work' do
-
# before { FeaturedWork.create(work_id: attributes.fetch('id')) }
-
# it 'can unfeature the work' do
-
# expect(presenter.work_featurable?).to be true
-
# expect(presenter.display_feature_link?).to be false
-
# expect(presenter.display_unfeature_link?).to be true
-
# end
-
# end
-
#
-
# describe "#editor?" do
-
# subject { presenter.editor? }
-
#
-
# it { is_expected.to be true }
-
# end
-
# end
-
#
-
# describe '#tweeter' do
-
# let(:user) { instance_double(User, user_key: 'user_key') }
-
#
-
# subject { presenter.tweeter }
-
#
-
# it 'delegates the depositor as the user_key to TwitterPresenter.twitter_handle_for' do
-
# expect(Hyrax::TwitterPresenter).to receive(:twitter_handle_for).with(user_key: user_key)
-
# subject
-
# end
-
# end
-
#
-
# describe "#permission_badge" do
-
# let(:badge) { instance_double(Hyrax::PermissionBadge) }
-
#
-
# before do
-
# allow(Hyrax::PermissionBadge).to receive(:new).and_return(badge)
-
# end
-
# it "calls the PermissionBadge object" do
-
# expect(badge).to receive(:render)
-
# presenter.permission_badge
-
# end
-
# end
-
#
-
# describe "#work_presenters" do
-
# let(:obj) { create(:work_with_file_and_work) }
-
# let(:attributes) { obj.to_solr }
-
#
-
# it "filters out members that are file sets" do
-
# expect(presenter.work_presenters.size).to eq 1
-
# expect(presenter.work_presenters.first).to be_instance_of(described_class)
-
# end
-
# end
-
#
-
# describe "#member_presenters" do
-
# let(:obj) { create(:work_with_file_and_work) }
-
# let(:attributes) { obj.to_solr }
-
#
-
# it "returns appropriate classes for each" do
-
# expect(presenter.member_presenters.size).to eq 2
-
# expect(presenter.member_presenters.first).to be_instance_of(Hyrax::FileSetPresenter)
-
# expect(presenter.member_presenters.last).to be_instance_of(described_class)
-
# end
-
# end
-
#
-
# describe "#file_set_presenters" do
-
# let(:obj) { create(:work_with_ordered_files) }
-
# let(:attributes) { obj.to_solr }
-
#
-
# it "displays them in order" do
-
# expect(presenter.file_set_presenters.map(&:id)).to eq obj.ordered_member_ids
-
# end
-
#
-
# context "solr query" do
-
# before do
-
# expect(ActiveFedora::SolrService).to receive(:query).twice.with(anything, hash_including(rows: 10_000)).and_return([])
-
# end
-
#
-
# it "requests >10 rows" do
-
# presenter.file_set_presenters
-
# end
-
# end
-
#
-
# context "when some of the members are not file sets" do
-
# let(:another_work) { create(:work) }
-
#
-
# before do
-
# obj.ordered_members << another_work
-
# obj.save!
-
# end
-
#
-
# it "filters out members that are not file sets" do
-
# expect(presenter.file_set_presenters.map(&:id)).not_to include another_work.id
-
# end
-
# end
-
# end
-
#
-
# describe "#representative_presenter" do
-
# let(:obj) { create(:work_with_representative_file) }
-
# let(:attributes) { obj.to_solr }
-
#
-
# it "has a representative" do
-
# expect(Hyrax::PresenterFactory).to receive(:build_for)
-
# .with(ids: [obj.members[0].id],
-
# presenter_class: Hyrax::CompositePresenterFactory,
-
# presenter_args: [ability, request])
-
# .and_return ["abc"]
-
# expect(presenter.representative_presenter).to eq("abc")
-
# end
-
#
-
# context 'without a representative' do
-
# let(:obj) { create(:work) }
-
#
-
# it 'has a nil presenter' do
-
# expect(presenter.representative_presenter).to be_nil
-
# end
-
# end
-
#
-
# context 'when it is its own representative' do
-
# let(:obj) { create(:work) }
-
#
-
# before do
-
# obj.representative_id = obj.id
-
# obj.save
-
# end
-
#
-
# it 'has a nil presenter; avoids infinite loop' do
-
# expect(presenter.representative_presenter).to be_nil
-
# end
-
# end
-
# end
-
#
-
# describe "#download_url" do
-
# subject { presenter.download_url }
-
#
-
# let(:solr_document) { SolrDocument.new(work.to_solr) }
-
#
-
# context "with a representative" do
-
# let(:work) { create(:work_with_representative_file) }
-
#
-
# it { is_expected.to eq "http://#{request.host}/downloads/#{work.representative_id}" }
-
# end
-
#
-
# context "without a representative" do
-
# let(:work) { create(:work) }
-
#
-
# it { is_expected.to eq '' }
-
# end
-
# end
-
-
1
describe '#page_title' do
-
2
subject { presenter.page_title }
-
-
2
it { is_expected.to eq 'Data Set | foo | ID: 888888 | DataCORE' }
-
end
-
-
# describe "#valid_child_concerns" do
-
# subject { presenter }
-
#
-
# it "delegates to the class attribute of the model" do
-
# allow(DataSet).to receive(:valid_child_concerns).and_return([DataSet])
-
#
-
# expect(subject.valid_child_concerns).to eq [DataSet]
-
# end
-
# end
-
#
-
# describe "#attribute_to_html" do
-
# let(:renderer) { double('renderer') }
-
#
-
# context 'with an existing field' do
-
# before do
-
# allow(Hyrax::Renderers::AttributeRenderer).to receive(:new)
-
# .with(:title, ['foo', 'bar'], {})
-
# .and_return(renderer)
-
# end
-
#
-
# it "calls the AttributeRenderer" do
-
# expect(renderer).to receive(:render)
-
# presenter.attribute_to_html(:title)
-
# end
-
# end
-
#
-
# context "with a field that doesn't exist" do
-
# it "logs a warning" do
-
# expect(Rails.logger).to receive(:warn).with('Hyrax::WorkShowPresenter attempted to render restrictions, but no method exists with that name.')
-
# presenter.attribute_to_html(:restrictions)
-
# end
-
# end
-
# end
-
#
-
# context "with workflow" do
-
# let(:user) { create(:user) }
-
# let(:ability) { Ability.new(user) }
-
# let(:entity) { instance_double(Sipity::Entity) }
-
#
-
# describe "#workflow" do
-
# subject { presenter.workflow }
-
#
-
# it { is_expected.to be_kind_of Hyrax::WorkflowPresenter }
-
# end
-
# end
-
#
-
# context "with inspect_work" do
-
# let(:user) { create(:user) }
-
# let(:ability) { Ability.new(user) }
-
#
-
# describe "#inspect_work" do
-
# subject { presenter.inspect_work }
-
#
-
# it { is_expected.to be_kind_of Hyrax::InspectWorkPresenter }
-
# end
-
# end
-
#
-
# describe "graph export methods" do
-
# let(:graph) do
-
# RDF::Graph.new.tap do |g|
-
# g << [RDF::URI('http://example.com/1'), RDF::Vocab::DC.title, 'Test title']
-
# end
-
# end
-
#
-
# let(:exporter) { double }
-
#
-
# before do
-
# allow(Hyrax::GraphExporter).to receive(:new).and_return(exporter)
-
# allow(exporter).to receive(:fetch).and_return(graph)
-
# end
-
#
-
# describe "#export_as_nt" do
-
# subject { presenter.export_as_nt }
-
#
-
# it { is_expected.to eq "<http://example.com/1> <http://purl.org/dc/terms/title> \"Test title\" .\n" }
-
# end
-
#
-
# describe "#export_as_ttl" do
-
# subject { presenter.export_as_ttl }
-
#
-
# it { is_expected.to eq "\n<http://example.com/1> <http://purl.org/dc/terms/title> \"Test title\" .\n" }
-
# end
-
#
-
# describe "#export_as_jsonld" do
-
# subject { presenter.export_as_jsonld }
-
#
-
# it do
-
# is_expected.to eq '{
-
# "@context": {
-
# "dc": "http://purl.org/dc/terms/"
-
# },
-
# "@id": "http://example.com/1",
-
# "dc:title": "Test title"
-
# }'
-
# end
-
# end
-
# end
-
#
-
# describe "#manifest" do
-
# let(:work) { create(:work_with_one_file) }
-
# let(:solr_document) { SolrDocument.new(work.to_solr) }
-
#
-
# describe "#sequence_rendering" do
-
# subject do
-
# presenter.sequence_rendering
-
# end
-
#
-
# before do
-
# Hydra::Works::AddFileToFileSet.call(work.file_sets.first,
-
# File.open(fixture_path + '/world.png'), :original_file)
-
# end
-
#
-
# it "returns a hash containing the rendering information" do
-
# work.rendering_ids = [work.file_sets.first.id]
-
# expect(subject).to be_an Array
-
# end
-
# end
-
#
-
# describe "#manifest_metadata" do
-
# subject do
-
# presenter.manifest_metadata
-
# end
-
#
-
# before do
-
# work.title = ['Test title', 'Another test title']
-
# end
-
#
-
# it "returns an array of metadata values" do
-
# expect(subject[0]['label']).to eq('Title')
-
# expect(subject[0]['value']).to include('Test title', 'Another test title')
-
# end
-
# end
-
# end
-
-
-
-
end
-
# Generated via
-
# `rails generate hyrax:work Dissertation`
-
1
require 'rails_helper'
-
-
1
RSpec.describe Hyrax::DissertationPresenter do
-
1
it "has tests" do
-
1
skip "Add your tests here"
-
end
-
end
-
# Generated via
-
# `rails generate hyrax:work GenericWork`
-
1
require 'rails_helper'
-
-
1
RSpec.describe Hyrax::GenericWorkPresenter do
-
1
it "has tests" do
-
1
skip "Add your tests here"
-
end
-
end
-
# This file is copied to spec/ when you run 'rails generate rspec:install'
-
1
require 'spec_helper'
-
1
ENV['RAILS_ENV'] ||= 'test'
-
1
require File.expand_path('../../config/environment', __FILE__)
-
# Prevent database truncation if the environment is production
-
1
abort("The Rails environment is running in production mode!") if Rails.env.production?
-
1
require 'rspec/rails'
-
# Add additional requires below this line. Rails is not loaded until this point!
-
-
# Requires supporting ruby files with custom matchers and macros, etc, in
-
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
-
# run as spec files by default. This means that files in spec/support that end
-
# in _spec.rb will both be required and run as specs, causing the specs to be
-
# run twice. It is recommended that you do not name files matching this glob to
-
# end with _spec.rb. You can configure this pattern with the --pattern
-
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
-
#
-
# The following line is provided for convenience purposes. It has the downside
-
# of increasing the boot-up time by auto-requiring all files in the support
-
# directory. Alternatively, in the individual `*_spec.rb` files, manually
-
# require only the support files necessary.
-
#
-
3
Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f } # rubocop:disable Rails/FilePath
-
-
# Checks for pending migrations and applies them before tests are run.
-
# If you are not using ActiveRecord, you can remove this line.
-
1
ActiveRecord::Migration.maintain_test_schema!
-
-
-
1
RSpec.configure do |config|
-
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
-
1
config.fixture_path = "#{::Rails.root}/spec/fixtures"
-
-
# If you're not using ActiveRecord, or you'd prefer not to run each of your
-
# examples within a transaction, remove the following line or assign false
-
# instead of true.
-
1
config.use_transactional_fixtures = false # true
-
-
# RSpec Rails can automatically mix in different behaviours to your tests
-
# based on their file location, for example enabling you to call `get` and
-
# `post` in specs under `spec/controllers`.
-
#
-
# You can disable this behaviour by removing the line below, and instead
-
# explicitly tag your specs with their type, e.g.:
-
#
-
# RSpec.describe UsersController, :type => :controller do
-
# # ...
-
# end
-
#
-
# The different available types are documented in the features, such as in
-
# https://relishapp.com/rspec/rspec-rails/docs
-
1
config.infer_spec_type_from_file_location!
-
-
# Filter lines from Rails gems in backtraces.
-
1
config.filter_rails_from_backtrace!
-
# arbitrary gems may also be filtered via:
-
# config.filter_gems_from_backtrace("gem name")
-
1
config.include Devise::Test::ControllerHelpers, type: :controller
-
-
1
config.before(:suite) do
-
1
DatabaseCleaner.clean_with(:truncation)
-
end
-
-
1
config.before do
-
410
DatabaseCleaner.strategy = :transaction
-
end
-
-
1
config.before(:each, js: true) do
-
2
DatabaseCleaner.strategy = :truncation
-
end
-
-
# This block must be here, do not combine with the other `before(:each)` block.
-
# This makes it so Capybara can see the database.
-
1
config.before do
-
410
DatabaseCleaner.start
-
end
-
-
1
config.after do
-
410
DatabaseCleaner.clean
-
end
-
-
end
-
1
require 'rails_helper'
-
-
-
1
RSpec.describe Dataset::DateCoverageService do
-
-
1
describe '#params_to_interval' do
-
-
1
context "when called with year, month, and day for begin_date and end_date" do
-
1
it "returns interval with day precision" do
-
-
1
params = { :date_coverage_begin_year => "2001", :date_coverage_begin_month => "1", :date_coverage_begin_day => "5",
-
:date_coverage_end_year => "2001", :date_coverage_end_month => "1", :date_coverage_end_day => "10" }
-
-
1
expect(Dataset::DateCoverageService.params_to_interval params).to eq Date.edtf('2001-01-05/2001-01-10')
-
end
-
end
-
-
1
context "when called with year and month for begin_date and end_date" do
-
1
it "returns interval with month precision" do
-
-
1
params = { :date_coverage_begin_year => "2001", :date_coverage_begin_month => "1", :date_coverage_begin_day => "",
-
:date_coverage_end_year => "2001", :date_coverage_end_month => "2", :date_coverage_end_day => "" }
-
-
1
expect(Dataset::DateCoverageService.params_to_interval params).to eq Date.edtf('2001-01/2001-02')
-
end
-
end
-
-
1
context "when called with year begin_date and end_date" do
-
1
it "returns interval with year precision" do
-
-
1
params = { :date_coverage_begin_year => "2002", :date_coverage_begin_month => "", :date_coverage_begin_day => "",
-
:date_coverage_end_year => "2003", :date_coverage_end_month => "", :date_coverage_end_day => "" }
-
-
1
expect(Dataset::DateCoverageService.params_to_interval params).to eq Date.edtf('2002/2003')
-
end
-
end
-
-
1
context "when called with begin_date but no end_date" do
-
1
it "returns Start Date" do
-
-
1
params = { :date_coverage_begin_year => "2007", :date_coverage_begin_month => "5", :date_coverage_begin_day => "6",
-
:date_coverage_end_year => "", :date_coverage_end_month => "", :date_coverage_end_day => "" }
-
-
1
expect(Dataset::DateCoverageService.params_to_interval params).to eq Date.edtf('2007-05-06')
-
end
-
end
-
-
1
context "when called with end_date but no begin_date" do
-
1
it "returns unknown to End Date" do
-
-
1
params = { :date_coverage_begin_year => "", :date_coverage_begin_month => "", :date_coverage_begin_day => "",
-
:date_coverage_end_year => "2010", :date_coverage_end_month => "10", :date_coverage_end_day => "10" }
-
-
1
expect(Dataset::DateCoverageService.params_to_interval params).to eq Date.edtf('unknown/2010-10-10')
-
end
-
end
-
-
1
context "when called with no date values" do
-
1
it "returns blank " do
-
1
params = { :date_coverage_begin_year => "", :date_coverage_begin_month => "", :date_coverage_begin_day => "",
-
:date_coverage_end_year => "", :date_coverage_end_month => "", :date_coverage_end_day => "" }
-
-
1
expect(Dataset::DateCoverageService.params_to_interval(params)).to be_blank
-
end
-
end
-
-
1
context "when called with reversed date values" do
-
1
it "returns blank" do
-
1
params = { :date_coverage_begin_year => "2001", :date_coverage_begin_month => "1", :date_coverage_begin_day => "6",
-
:date_coverage_end_year => "2001", :date_coverage_end_month => "1", :date_coverage_end_day => "5" }
-
-
1
expect(Dataset::DateCoverageService.params_to_interval(params)).to be_blank
-
end
-
end
-
end
-
-
-
1
describe '#interval_to_params' do
-
-
1
context "when called with an interval" do
-
1
it "returns date params of interval" do
-
-
1
params = { :date_coverage_begin_year => "2002", :date_coverage_begin_month => "2", :date_coverage_begin_day => "6",
-
:date_coverage_end_year => "2004", :date_coverage_end_month => "8", :date_coverage_end_day => "16" }
-
-
1
expect(Dataset::DateCoverageService.interval_to_params Date.edtf('2002-02-06/2004-08-16')).to eq params
-
end
-
end
-
-
1
context "when called with argument that is not an interval" do
-
1
it "returns blank when not called with interval" do
-
-
1
expect(Dataset::DateCoverageService.interval_to_params Date.new(2001,2,25) ).to be_blank
-
end
-
end
-
-
1
context "when called with interval in reverse chronological order" do
-
1
it "returns blank" do
-
-
1
expect(Dataset::DateCoverageService.interval_to_params Date.edtf('2004-08-16/2002-02-06') ).to be_blank
-
end
-
end
-
-
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
class MockAsset
-
1
def initialize( id:, model_name: )
-
5
@id = id
-
5
@model_name = model_name
-
end
-
1
def id
-
5
@id.to_s
-
end
-
1
def model_name
-
2
@model_name
-
end
-
1
def to_ary
-
3
[self]
-
end
-
end
-
-
1
class MockLeadTime
-
1
def initialize(days:)
-
5
@days = days
-
end
-
1
def to_s
-
5
@days.to_s + " days"
-
end
-
1
def days
-
5
@days
-
end
-
end
-
-
1
RSpec.describe Deepblue::AboutToExpireEmbargoesService do
-
12
subject { described_class.new }
-
-
1
describe "#initialize" do
-
1
before {
-
2
allow(Deepblue::LoggingHelper).to receive(:here).and_return "here"
-
2
allow(Deepblue::LoggingHelper).to receive(:called_from).and_return "from"
-
2
allow(Deepblue::LoggingHelper).to receive(:obj_class).with("class", anything).and_return "object class"
-
2
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"email_owner=true",
-
"expiration_lead_days=",
-
"skip_file_sets=true",
-
"test_mode=true",
-
"to_console=false",
-
"verbose=false",
-
""]
-
}
-
-
1
it "calls LoggingHelper.bold_debug" do
-
1
expect(Deepblue::LoggingHelper).to receive(:here).and_return "here"
-
1
expect(Deepblue::LoggingHelper).to receive(:called_from).and_return "from"
-
1
expect(Deepblue::LoggingHelper).to receive(:obj_class).with("class", anything).and_return "object class"
-
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"email_owner=true",
-
"expiration_lead_days=",
-
"skip_file_sets=true",
-
"test_mode=true",
-
"to_console=false",
-
"verbose=false",
-
""]
-
1
Deepblue::AboutToExpireEmbargoesService.new
-
end
-
-
1
it "sets instance variables" do
-
1
subject.instance_variable_get(:@email_owner) == true
-
1
subject.instance_variable_get(:@expiration_lead_days) == nil
-
1
subject.instance_variable_get(:@skip_file_sets) == true
-
1
subject.instance_variable_get(:@test_mode) == true
-
1
subject.instance_variable_get(:@to_console) == false
-
1
subject.instance_variable_get(:@verbose) == false
-
-
end
-
end
-
-
1
describe "#run" do
-
1
mock_asset = MockAsset.new id:1001, model_name: "model name"
-
-
1
before {
-
# stubbing initialize
-
3
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"email_owner=true",
-
"expiration_lead_days=",
-
"skip_file_sets=true",
-
"test_mode=true",
-
"to_console=false",
-
"verbose=false",
-
""]
-
-
3
allow(Deepblue::LoggingHelper).to receive(:here).and_return "here"
-
3
allow(Deepblue::LoggingHelper).to receive(:called_from).and_return "from"
-
3
allow(Deepblue::LoggingHelper).to receive(:obj_class).with("class", anything).and_return "object class"
-
-
3
allow(DateTime).to receive(:now).and_return DateTime.new(2025, 5, 5)
-
3
allow(subject).to receive(:assets_under_embargo).and_return(mock_asset)
-
-
3
allow(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 7
-
3
allow(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 1
-
}
-
-
1
context "when @expiration_lead_days is blank" do
-
1
it "calls LoggingHelper.bold_debug, calls about_to_expire_embargoes_for_lead_days with default values" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@expiration_lead_days=",
-
"@skip_file_sets=true",
-
"@test_mode=true",
-
"@to_console=false",
-
"@verbose=false",
-
""]
-
1
expect(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 7
-
1
expect(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 1
-
-
1
subject.run
-
-
1
subject.instance_variable_get(:@now) == DateTime.new(2025, 5, 5)
-
1
subject.instance_variable_get(:@assets) == [mock_asset]
-
end
-
end
-
-
1
context "when @expiration_lead_days is an integer greater than zero" do
-
1
before {
-
1
subject.instance_variable_set(:@expiration_lead_days, "27")
-
-
1
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@expiration_lead_days=27",
-
"@skip_file_sets=true",
-
"@test_mode=true",
-
"@to_console=false",
-
"@verbose=false",
-
""]
-
1
allow(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 27
-
}
-
-
1
it "calls LoggingHelper.bold_debug, calls about_to_expire_embargoes_for_lead_days with @expiration_lead_days" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@expiration_lead_days=27",
-
"@skip_file_sets=true",
-
"@test_mode=true",
-
"@to_console=false",
-
"@verbose=false",
-
""]
-
1
expect(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 27
-
1
subject.run
-
end
-
end
-
-
1
context "when @expiration_lead_days is 0 or less" do
-
1
before {
-
1
subject.instance_variable_set(:@expiration_lead_days, "0")
-
-
1
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@expiration_lead_days=0",
-
"@skip_file_sets=true",
-
"@test_mode=true",
-
"@to_console=false",
-
"@verbose=false",
-
""]
-
}
-
-
1
it "calls LoggingHelper.bold_debug, calls about_to_expire_embargoes_for_lead_days with default values" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@expiration_lead_days=0",
-
"@skip_file_sets=true",
-
"@test_mode=true",
-
"@to_console=false",
-
"@verbose=false",
-
""]
-
1
expect(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 7
-
1
expect(subject).to receive(:about_to_expire_embargoes_for_lead_days).with lead_days: 1
-
1
subject.run
-
end
-
end
-
end
-
-
-
1
describe "#about_to_expire_embargoes_for_lead_days" do
-
1
before {
-
5
allow(subject).to receive(:run_msg).with "about_to_expire_embargoes_for_lead_days: lead_days=21 days"
-
5
subject.instance_variable_set(:@now, DateTime.new(2025, 2, 2))
-
5
allow(subject).to receive(:run_msg).with "lead_date=20250202"
-
5
allow(subject).to receive(:run_msg).with "1001 embargo_release_date=20250303"
-
5
subject.instance_variable_set(:@email_owner, "email owner")
-
5
subject.instance_variable_set(:@test_mode, "test mode")
-
5
subject.instance_variable_set(:@verbose, "verbose")
-
}
-
-
1
context "when @skip_file_sets is true and asset.model_name is 'FileSet'" do
-
1
before {
-
1
subject.instance_variable_set(:@skip_file_sets, true)
-
-
1
asset = MockAsset.new(id: 1001, model_name: "FileSet")
-
1
subject.instance_variable_set(:@assets, [asset])
-
1
allow(subject).to receive(:asset_embargo_release_date).with(asset: asset).and_return DateTime.new(2025, 3, 3)
-
}
-
1
it "skips loop through @assets " do
-
1
expect(subject).to receive(:run_msg).with "about_to_expire_embargoes_for_lead_days: lead_days=21 days"
-
1
expect(subject).to receive(:run_msg).with "lead_date=20250223"
-
-
1
subject.about_to_expire_embargoes_for_lead_days lead_days: MockLeadTime.new(days: 21)
-
end
-
end
-
-
1
context "when @skip_file_sets is false and asset.model_name is 'FileSet'" do
-
1
before {
-
1
subject.instance_variable_set(:@skip_file_sets, false)
-
-
1
asset = MockAsset.new(id: 1001, model_name: "FileSet")
-
1
subject.instance_variable_set(:@assets, [asset])
-
1
allow(subject).to receive(:asset_embargo_release_date).with(asset: asset).and_return DateTime.new(2025, 3, 3)
-
}
-
1
it "outputs embargo_release_date" do
-
1
expect(subject).to receive(:run_msg).with "about_to_expire_embargoes_for_lead_days: lead_days=21 days"
-
1
expect(subject).to receive(:run_msg).with "lead_date=20250223"
-
1
expect(subject).to receive(:run_msg).with "1001 embargo_release_date=20250303"
-
-
1
subject.about_to_expire_embargoes_for_lead_days lead_days: MockLeadTime.new(days: 21)
-
end
-
end
-
-
1
context "when @skip_file_sets is true and asset.model_name is not 'FileSet'" do
-
1
before {
-
1
subject.instance_variable_set(:@skip_file_sets, true)
-
-
1
asset = MockAsset.new(id: 1001, model_name: "model name")
-
1
subject.instance_variable_set(:@assets, [asset])
-
1
allow(subject).to receive(:asset_embargo_release_date).with(asset: asset).and_return DateTime.new(2025, 3, 3)
-
}
-
-
1
it "outputs embargo_release_date" do
-
1
expect(subject).to receive(:run_msg).with "about_to_expire_embargoes_for_lead_days: lead_days=21 days"
-
1
expect(subject).to receive(:run_msg).with "lead_date=20250223"
-
1
expect(subject).to receive(:run_msg).with "1001 embargo_release_date=20250303"
-
-
1
subject.about_to_expire_embargoes_for_lead_days lead_days: MockLeadTime.new(days: 21)
-
end
-
end
-
-
1
context "when embargo_release_date equals lead_date" do
-
1
asset = MockAsset.new(id: 1001, model_name: "model name")
-
-
1
before {
-
2
subject.instance_variable_set(:@skip_file_sets, false)
-
-
2
subject.instance_variable_set(:@assets, [asset])
-
2
allow(subject).to receive(:asset_embargo_release_date).with(asset: asset).and_return DateTime.new(2025, 2, 23)
-
}
-
-
1
context "when @test_mode is false" do
-
1
mockLeadTime = MockLeadTime.new(days: 21)
-
-
1
before {
-
1
subject.instance_variable_set(:@test_mode, false)
-
1
allow(subject).to receive(:about_to_expire_embargo_email).with( asset: asset,
-
expiration_days: mockLeadTime,
-
email_owner: "email owner",
-
test_mode: false,
-
verbose: "verbose" )
-
}
-
1
it "calls about_to_expire_embargo_email" do
-
1
expect(subject).to receive(:run_msg).with "about_to_expire_embargoes_for_lead_days: lead_days=21 days"
-
1
expect(subject).to receive(:run_msg).with "lead_date=20250223"
-
1
expect(subject).to receive(:run_msg).with "1001 embargo_release_date=20250223"
-
1
expect(subject).to receive(:about_to_expire_embargo_email).with( asset: asset,
-
expiration_days: mockLeadTime,
-
email_owner: "email owner",
-
test_mode: false,
-
verbose: "verbose" )
-
-
1
subject.about_to_expire_embargoes_for_lead_days lead_days: mockLeadTime
-
end
-
end
-
-
1
context "when @test_mode is true" do
-
1
before {
-
1
subject.instance_variable_set(:@test_mode, true)
-
}
-
1
it "outputs message" do
-
1
expect(subject).to receive(:run_msg).with "about_to_expire_embargoes_for_lead_days: lead_days=21 days"
-
1
expect(subject).to receive(:run_msg).with "lead_date=20250223"
-
1
expect(subject).to receive(:run_msg).with "1001 embargo_release_date=20250223"
-
-
1
expect(subject).to receive(:run_msg).with "about to call about_to_expire_embargo_email for asset 1001"
-
-
1
subject.about_to_expire_embargoes_for_lead_days lead_days: MockLeadTime.new(days: 21)
-
end
-
end
-
end
-
end
-
-
-
1
describe "#run_msg" do
-
1
before {
-
2
allow(Deepblue::LoggingHelper).to receive(:debug).with "message"
-
}
-
-
1
context "when @to_console is false" do
-
1
before {
-
1
subject.instance_variable_set(:@to_console, false)
-
}
-
1
it "calls LoggingHelper.debug" do
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with "message"
-
-
1
subject.run_msg "message"
-
end
-
end
-
-
1
context "when @to_console is true" do
-
1
before {
-
1
subject.instance_variable_set(:@to_console, true)
-
}
-
1
it "calls LoggingHelper.debug" do
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with "message"
-
1
expect(subject).to receive(:puts).with("message")
-
-
1
subject.run_msg "message"
-
end
-
end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
class MockExpiredAsset
-
-
1
def initialize (model_name: "FileSet")
-
2
@model_name = model_name
-
end
-
1
def to_ary
-
3
[self]
-
end
-
-
1
def model_name
-
3
@model_name
-
end
-
-
1
def id
-
3
"101"
-
end
-
-
1
def human_readable_type
-
1
"typographic"
-
end
-
-
1
def solr_document
-
1
OpenStruct.new(title: 'solaris')
-
end
-
-
1
def embargo_release_date
-
1
"April 2nd 2025"
-
end
-
-
1
def visibility_after_embargo
-
1
"sparkling clear"
-
end
-
end
-
-
-
-
1
RSpec.describe Deepblue::DeactivateExpiredEmbargoesService do
-
-
1
describe "#initialize" do
-
2
subject { described_class.new( email_owner: false, skip_file_sets: false, test_mode: false, to_console: true, verbose: true) }
-
-
1
before {
-
1
allow(Deepblue::LoggingHelper).to receive(:here).and_return "here"
-
1
allow(Deepblue::LoggingHelper).to receive(:called_from).and_return "from"
-
1
allow(Deepblue::LoggingHelper).to receive(:obj_class).with("class", anything).and_return "object class"
-
1
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"email_owner=false",
-
"skip_file_sets=false",
-
"test_mode=false",
-
"to_console=true",
-
"verbose=true",
-
""]
-
}
-
-
1
it "calls LoggingHelper.bold_debug" do
-
1
expect(Deepblue::LoggingHelper).to receive(:here).and_return "here"
-
1
expect(Deepblue::LoggingHelper).to receive(:called_from).and_return "from"
-
1
expect(Deepblue::LoggingHelper).to receive(:obj_class).with("class", anything).and_return "object class"
-
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"email_owner=false",
-
"skip_file_sets=false",
-
"test_mode=false",
-
"to_console=true",
-
"verbose=true",
-
""]
-
1
subject.instance_variable_get(:@email_owner) == false
-
1
subject.instance_variable_get(:@skip_file_sets) == false
-
1
subject.instance_variable_get(:@test_mode) == false
-
1
subject.instance_variable_get(:@to_console) == true
-
1
subject.instance_variable_get(:@verbose) == true
-
end
-
end
-
-
1
describe "#run" do
-
1
mock_asset = MockExpiredAsset.new
-
-
1
before {
-
3
allow(Deepblue::LoggingHelper).to receive(:here).and_return "here"
-
3
allow(Deepblue::LoggingHelper).to receive(:called_from).and_return "from"
-
3
allow(Deepblue::LoggingHelper).to receive(:obj_class).with("class", anything).and_return "object class"
-
-
3
allow(DateTime).to receive(:now).and_return DateTime.new(2025, 6, 1)
-
3
allow(Hyrax::EmbargoService).to receive(:assets_with_expired_embargoes).and_return(mock_asset)
-
3
allow(subject).to receive(:run_msg).with "The number of assets with expired embargoes is: 1"
-
}
-
-
1
context "when verbose is false and asset.model_name is not 'FileSet'" do
-
1
mock_asset_2 = MockExpiredAsset.new(model_name: "Work")
-
2
subject { described_class.new( skip_file_sets: true, verbose: false) }
-
-
1
before {
-
1
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
""]
-
-
1
allow(Hyrax::EmbargoService).to receive(:assets_with_expired_embargoes).and_return(mock_asset_2)
-
-
1
allow(::ActiveFedora::Base).to receive(:find).with("101").and_return "basic"
-
1
allow(Deepblue::ProvenanceHelper).to receive(:system_as_current_user).and_return "au courant"
-
1
allow(subject).to receive(:deactivate_embargo).with(curation_concern: "basic",
-
copy_visibility_to_files: true,
-
current_user: "au courant",
-
email_owner: true,
-
test_mode: true,
-
verbose: false )
-
}
-
-
1
it "calls deactivate_embargo" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@skip_file_sets=true",
-
"@test_mode=true",
-
""]
-
-
1
subject.instance_variable_get(:@now) == DateTime.new(2025, 6, 1)
-
1
subject.instance_variable_get(:@assets) == [mock_asset_2]
-
1
expect(subject).not_to receive(:run_msg)
-
1
expect(::ActiveFedora::Base).to receive(:find).with("101").and_return "basic"
-
1
expect(Deepblue::ProvenanceHelper).to receive(:system_as_current_user).and_return "au courant"
-
1
expect(subject).to receive(:deactivate_embargo).with(curation_concern: "basic",
-
copy_visibility_to_files: true,
-
current_user: "au courant",
-
email_owner: true,
-
test_mode: true,
-
verbose: false )
-
1
subject.run
-
end
-
end
-
-
1
context "when verbose is true and @skip_file_sets is false" do
-
2
subject { described_class.new( skip_file_sets: false, verbose: true) }
-
-
1
before {
-
1
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
""]
-
-
1
allow(subject).to receive(:run_msg).with "The number of assets with expired embargoes is: 1"
-
1
allow(subject).to receive(:run_msg).with "0 - 101, FileSet, typographic, solaris April 2nd 2025, sparkling clear"
-
1
allow(::ActiveFedora::Base).to receive(:find).with("101").and_return "basic"
-
1
allow(Deepblue::ProvenanceHelper).to receive(:system_as_current_user).and_return "au courant"
-
1
allow(subject).to receive(:deactivate_embargo).with(curation_concern: "basic",
-
copy_visibility_to_files: true,
-
current_user: "au courant",
-
email_owner: true,
-
test_mode: true,
-
verbose: true )
-
}
-
-
1
it "outputs messages and calls deactivate_embargo" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@skip_file_sets=false",
-
"@test_mode=true",
-
""]
-
-
1
subject.instance_variable_get(:@now) == DateTime.new(2025, 6, 1)
-
1
subject.instance_variable_get(:@assets) == [mock_asset]
-
1
expect(subject).to receive(:run_msg).with "The number of assets with expired embargoes is: 1"
-
1
expect(subject).to receive(:run_msg).with "0 - 101, FileSet, typographic, solaris April 2nd 2025, sparkling clear"
-
1
expect(::ActiveFedora::Base).to receive(:find).with("101").and_return "basic"
-
1
expect(Deepblue::ProvenanceHelper).to receive(:system_as_current_user).and_return "au courant"
-
1
expect(subject).to receive(:deactivate_embargo).with(curation_concern: "basic",
-
copy_visibility_to_files: true,
-
current_user: "au courant",
-
email_owner: true,
-
test_mode: true,
-
verbose: true )
-
1
subject.run
-
end
-
end
-
-
1
context "when verbose is false and @skip_file-sets is true" do
-
2
subject { described_class.new( skip_file_sets: true, verbose: false) }
-
-
1
before {
-
1
allow(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
""]
-
}
-
-
1
it "does not output messages" do
-
1
expect(Deepblue::LoggingHelper).to receive(:bold_debug).with ["here",
-
"from",
-
"object class",
-
"@email_owner=true",
-
"@skip_file_sets=true",
-
"@test_mode=true",
-
""]
-
1
expect(subject).not_to receive(:run_msg)
-
1
expect(subject).not_to receive(:deactivate_embargo)
-
-
1
subject.instance_variable_get(:@now) == DateTime.new(2025, 6, 1)
-
1
subject.instance_variable_get(:@assets) == [mock_asset]
-
-
1
subject.run
-
end
-
end
-
end
-
-
1
describe "#run_msg" do
-
1
before {
-
2
allow(Deepblue::LoggingHelper).to receive(:debug).with "message"
-
}
-
-
1
context "when @to_console is false" do
-
1
before {
-
1
subject.instance_variable_set(:@to_console, false)
-
}
-
1
it "calls LoggingHelper.debug" do
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with "message"
-
-
1
subject.run_msg "message"
-
end
-
end
-
-
1
context "when @to_console is true" do
-
1
before {
-
1
subject.instance_variable_set(:@to_console, true)
-
}
-
1
it "calls LoggingHelper.debug" do
-
1
expect(Deepblue::LoggingHelper).to receive(:debug).with "message"
-
1
expect(subject).to receive(:puts).with("message")
-
-
1
subject.run_msg "message"
-
end
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
require 'rails_helper'
-
-
1
describe Deepblue::DoiMintingService do
-
-
1
context "when minting a new doi" do
-
6
subject { described_class.new( work: work, current_user: "test_doi_minting_service@umich.edu" ) }
-
6
let(:work) { mock_model(GenericWork, id: '123', title: ['demotitle'],
-
creator: ['Smith, John', 'Smith, Jane', 'O\'Rielly, Kelly'])}
-
6
let(:work_url) { "umrdr-testing.hydra.lib.umich.edu/concern/work/#{work.id}" }
-
6
let(:dummy_doi) { "doi:10.5072/FK2DEAD455BEEF" }
-
6
let(:identifier) { instance_double(Ezid::Identifier, id: dummy_doi) }
-
-
1
before do
-
5
allow(Rails).to receive_message_chain("application.routes.url_helpers.hyrax_data_set_url").and_return(work_url)
-
5
allow(work).to receive(:save)
-
5
allow(work).to receive(:reload)
-
5
allow(work).to receive(:doi).and_return(identifier.id)
-
5
allow(work).to receive(:doi=)
-
5
allow(work).to receive(:provenance_mint_doi)
-
5
allow(subject).to receive(:doi_server_reachable?).and_return(true)
-
5
allow(Ezid::Identifier).to receive(:mint).and_return(identifier)
-
end
-
-
1
it "has expected metadata" do
-
1
expect(subject.metadata.datacite_title).to eq(work.title.first)
-
1
expect(subject.metadata.datacite_publisher).to eq(described_class::PUBLISHER)
-
1
expect(subject.metadata.datacite_publicationyear).to eq(Date.today.year.to_s)
-
1
expect(subject.metadata.datacite_resourcetype).to eq(described_class::RESOURCE_TYPE)
-
1
expect(subject.metadata.datacite_creator).to eq(work.creator.join(';'))
-
1
expect(subject.metadata.target).not_to be_empty
-
end
-
-
1
it "calls out to EZID to mint a doi" do
-
1
expect(Ezid::Identifier).to receive(:mint)
-
1
subject.run
-
end
-
-
1
it "returns the id value of the identifier" do
-
1
expect(subject.run).to eq(identifier.id)
-
end
-
-
1
it "assigns the doi value and saves the work" do
-
1
expect(work).to receive(:doi=).with(identifier.id)
-
1
expect(work).to receive(:save)
-
1
subject.run
-
end
-
-
1
context "EZID service is unreachable" do
-
1
before do
-
1
allow(subject).to receive(:doi_server_reachable?).and_return(false)
-
end
-
1
it "does not attempt to mint a doi" do
-
1
expect(subject).not_to receive(:mint_doi)
-
1
expect(subject.run).to eq "doi:10.5072/FK2DEAD455BEEF"
-
end
-
end
-
end
-
-
1
context "when actually calling out to service" do
-
1
let(:work) { GenericWork.new(id: '123', title: ['demotitle'],
-
creator: ['Smith, John', 'Smith, Jane', 'O\'Rielly, Kelly'])}
-
1
let( :current_user ) { "test_doi_minting_service@umich.edu" }
-
1
it "mints a doi" do
-
1
skip unless ENV['INTEGRATION']
-
expect(described_class.mint_doi_for( work: work, current_user: current_user ) ).to start_with 'doi:10.5072/FK2'
-
end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
class OutputMock
-
-
1
def puts (text)
-
1
text
-
end
-
end
-
-
1
RSpec.describe Deepblue::LogExporter do
-
10
let(:setup_output) { OutputMock.new }
-
10
subject { described_class.new( filter: nil, input: "input", output: setup_output) }
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( Deepblue::LogExporter::DEFAULT_PP_EXPORT ).to eq false
-
end
-
end
-
-
1
describe "#initialize" do
-
1
it "calls super" do
-
1
skip "Add a test"
-
end
-
-
1
context "when verbose is false" do
-
1
it "sets instance variables" do
-
1
subject.instance_variable_get(:@output) == setup_output
-
1
subject.instance_variable_get(:@pp_export) == false
-
end
-
end
-
-
1
context "when verbose is true" do
-
1
it "sets instance variables, calls puts method" do
-
1
skip "Add a test"
-
end
-
end
-
end
-
-
1
describe "#export_line" do
-
1
context "when pp_export is equivalent to true" do
-
1
before {
-
1
allow(subject).to receive(:pp_export).and_return true
-
}
-
1
it "calls pretty_print_line" do
-
1
expect(subject).to receive(:pretty_print_line).with "line", "timestamp", "event", "event_note", "class_name", "id", "raw"
-
1
subject.export_line "line", "timestamp", "event", "event_note", "class_name", "id", "raw"
-
end
-
end
-
-
1
context "when pp_export is equivalent to false" do
-
1
before {
-
1
allow(subject).to receive(:pp_export).and_return false
-
}
-
1
it "calls @output.puts" do
-
1
expect(subject.export_line"line", "timestamp", "event", "event_note", "class_name", "id", "raw")
-
.to eq "line"
-
end
-
end
-
end
-
-
1
describe "#pretty_print_line" do
-
1
before {
-
1
allow(subject.output).to receive(:puts).with("timestamp event/event_note/class_name/id")
-
1
allow(subject.output).to receive(:puts).with("{\n \"foo\": \"bar\",\n \"ping\": \"pong\"\n}")
-
}
-
-
1
it "calls @output.puts twice" do
-
1
expect(subject.output).to receive(:puts).with("timestamp event/event_note/class_name/id")
-
1
expect(subject.pretty_print_line "line", "timestamp", "event", "event_note", "class_name", "id", '{"foo":"bar", "ping":"pong"}')
-
end
-
end
-
-
1
describe "#output_mode" do
-
1
context "when @output_mode has no value" do
-
1
before {
-
1
allow(subject).to receive(:option).with(key: 'output_mode', default_value: 'w').and_return "output_mode=w"
-
}
-
1
it "sets and returns output mode" do
-
1
expect(subject).to receive(:option).with(key: 'output_mode', default_value: 'w')
-
1
expect(subject.output_mode).to eq "output_mode=w"
-
1
subject.instance_variable_get(:@output_mode) == "output_mode=w"
-
end
-
end
-
-
1
context "when @output_mode has a value" do
-
1
before {
-
1
subject.instance_variable_set(:@output_mode, "high value")
-
1
allow(subject).to receive(:option).with(key: 'output_mode', default_value: 'w').and_return "output_mode=w"
-
}
-
1
it "sets and returns output mode" do
-
1
expect(subject).not_to receive(:option).with(key: 'output_mode', default_value: 'w')
-
1
expect(subject.output_mode).to eq "high value"
-
1
subject.instance_variable_get(:@output_mode) == "high value"
-
end
-
end
-
end
-
-
1
describe "#run" do
-
1
before {
-
2
allow(subject).to receive(:log_open_output)
-
2
allow(subject).to receive(:readlines).and_return( "line",
-
"timestamp",
-
"event",
-
"event_note",
-
"class_name",
-
"id",
-
"raw_key_values" )
-
2
allow(subject).to receive(:log_close_output)
-
}
-
-
1
it "calls log_open_output, readlines, and log_close_output" do
-
1
expect(subject).to receive(:log_open_output)
-
1
expect(subject).to receive(:readlines)
-
1
expect(subject).to receive(:log_close_output)
-
-
1
subject.run
-
-
1
subject.instance_variable_get(:@lines_exported) == 1
-
end
-
-
1
it "calls export_line" do
-
1
skip "Add a test"
-
end
-
end
-
-
1
describe "#quick_report" do
-
1
before {
-
# stubbing for super
-
1
subject.instance_variable_set(:@input_pathname, "strawberries")
-
1
subject.instance_variable_set(:@lines_read, "watermelon")
-
1
subject.instance_variable_set(:@lines_parsed, "yuzu")
-
-
1
subject.instance_variable_set(:@output_pathname, "bananas")
-
1
subject.instance_variable_set(:@lines_exported, "oranges")
-
}
-
-
1
it "calls super, outputs text" do
-
# super expectations
-
1
expect(subject).to receive(:puts).with no_args
-
1
expect(subject).to receive(:puts).with "Quick report"
-
1
expect(subject).to receive(:puts).with "input_pathname: strawberries"
-
1
expect(subject).to receive(:puts).with "lines_read: watermelon"
-
1
expect(subject).to receive(:puts).with "lines_parsed: yuzu"
-
-
1
expect(subject).to receive(:puts).with "output_pathname: bananas"
-
1
expect(subject).to receive(:puts).with "lines_exported: oranges"
-
-
1
subject.quick_report
-
end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
class MockLogFilter
-
-
1
def initialize ( all_log_filter = false )
-
29
@all_log_filter = all_log_filter
-
end
-
1
def all_log_filter?
-
6
@all_log_filter
-
end
-
-
1
def and ( new_filters: )
-
1
"bran flakes"
-
end
-
-
1
def to_ary
-
4
[self]
-
end
-
-
1
def or (new_filters: )
-
1
"corn flakes"
-
end
-
end
-
-
-
1
RSpec.describe Deepblue::LogReader do
-
20
let( :initial_filter ) { MockLogFilter.new all_log_filter: true}
-
20
subject { described_class.new(filter: initial_filter, input: "input", options: {"red" => "green"} ) }
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( Deepblue::LogReader::DEFAULT_BEGIN_TIMESTAMP ).to be_blank
-
1
expect( Deepblue::LogReader::DEFAULT_END_TIMESTAMP ).to be_blank
-
1
expect( Deepblue::LogReader::DEFAULT_TIMESTAMP_FORMAT ).to be_blank
-
1
expect( Deepblue::LogReader::DEFAULT_VERBOSE ).to eq false
-
1
expect( Deepblue::LogReader::DEFAULT_VERBOSE_FILTER ).to eq false
-
end
-
end
-
-
1
describe "#initialize" do
-
1
it "sets instance variables" do
-
1
subject.instance_variable_get(:@filter) == initial_filter
-
1
subject.instance_variable_get(:@input) == "input"
-
1
subject.instance_variable_get(:@options) == {"red" => "green"}
-
1
subject.instance_variable_get(:@verbose) == false
-
1
subject.instance_variable_get(:@verbose_filter) == false
-
end
-
-
1
it "calls add_date_range_filter" do
-
1
skip "Add a test"
-
end
-
end
-
-
1
describe "#initialize_filter" do
-
1
context "when filter is blank" do
-
1
before {
-
1
allow(Deepblue::AllLogFilter).to receive(:new)
-
}
-
1
it "calls AllLogFilter.new" do
-
1
expect(Deepblue::AllLogFilter).to receive(:new)
-
-
1
expect(subject.initialize_filter nil).to be_blank
-
end
-
end
-
-
1
context "when filter is an array" do
-
1
before {
-
1
allow(Deepblue::AndLogFilter).to receive(:new).with( filters: ["filtration", "salination"], options: {}).and_return "particulate matter"
-
}
-
1
it "calls AndLogFilter.new" do
-
1
allow(Deepblue::AndLogFilter).to receive(:initialize).with( filters: ["filtration", "salination"], options: {})
-
-
1
expect(subject.initialize_filter ["filtration", "salination"]).to eq "particulate matter"
-
end
-
end
-
-
1
context "when filter is not an array and not blank" do
-
1
it "returns filter" do
-
1
expect(subject.initialize_filter "condensation").to eq "condensation"
-
end
-
end
-
end
-
-
-
1
describe "#add_date_range_filter" do
-
-
1
context "when option function returns blank every time" do
-
1
before {
-
1
allow(subject).to receive("option").with(key: 'begin')
-
1
allow(subject).to receive("option").with(key: 'begin_timestamp', default_value: '')
-
-
1
allow(subject).to receive("option").with(key: 'end')
-
1
allow(subject).to receive("option").with(key: 'end_timestamp', default_value: '')
-
-
1
allow(subject).to receive("option").with(key: 'format')
-
1
allow(subject).to receive("option").with(key: 'timestamp_format', default_value: '')
-
-
1
allow(subject).to receive("verbose_filter").and_return(false)
-
}
-
1
it "returns nil" do
-
1
expect(subject).to receive("option").with(key: 'begin')
-
1
expect(subject).to receive("option").with(key: 'begin_timestamp', default_value: '')
-
-
1
expect(subject).to receive("option").with(key: 'end')
-
1
expect(subject).to receive("option").with(key: 'end_timestamp', default_value: '')
-
-
1
expect(subject).to receive("option").with(key: 'format')
-
1
expect(subject).to receive("option").with(key: 'timestamp_format', default_value: '')
-
-
1
expect(subject.add_date_range_filter).to be_blank
-
end
-
end
-
-
1
context "when begin_timestamp and end_timestamp not blank and verbose_filter" do
-
1
before {
-
1
allow(subject).to receive("option").with(key: 'begin').and_return( "good times" )
-
1
allow(subject).to receive("option").with(key: 'end').and_return( "best times" )
-
1
allow(subject).to receive("option").with(key: 'format').and_return( "worst times" )
-
-
1
allow(subject).to receive("verbose_filter").and_return( true )
-
-
1
allow(Deepblue::DateLogFilter).to receive("new").with( begin_timestamp: "good times" ,
-
end_timestamp: "best times" ,
-
timestamp_format: "worst times",
-
options: Hash.new(["wherefore" => "whyfor"]) )
-
.and_return( "home on the range" )
-
1
allow(subject).to receive("filter_and").with( new_filters: "home on the range" )
-
}
-
1
it "calls DateLogFilter.new and filter_and" do
-
1
expect(subject).to receive("option").with(key: 'begin')
-
1
expect(subject).to receive("option").with(key: 'end')
-
1
expect(subject).to receive("option").with(key: 'format')
-
-
1
expect(subject).to receive("puts").with "add_date_range_filter begin_timestamp=good times end_timestamp=best times"
-
-
1
expect(Deepblue::DateLogFilter).to receive("new").with( begin_timestamp: "good times" ,
-
end_timestamp: "best times" ,
-
timestamp_format: "worst times",
-
options: Hash.new(["wherefore" => "whyfor"]) )
-
1
expect(subject).to receive("filter_and").with( new_filters: "home on the range" )
-
-
1
expect(subject.add_date_range_filter options: Hash.new(["wherefore" => "whyfor"])).to be_blank
-
end
-
end
-
end
-
-
1
describe "#filter_and" do
-
1
context "when new_filters is blank" do
-
1
it "returns blank" do
-
-
1
expect(subject.filter_and new_filters: nil).to be_blank
-
end
-
end
-
-
1
context "when verbose" do
-
1
before {
-
1
allow(subject).to receive(:verbose).and_return(true)
-
1
allow(subject).to receive(:filter_refresh).with( current_filter: initial_filter, new_filters: "consonant", append: true, options: {} )
-
.and_return "globalization"
-
1
allow(subject).to receive(:puts).with( "filter_and @filter=globalization" )
-
}
-
-
1
it "outputs string" do
-
1
expect(subject).to receive(:filter_refresh).with( current_filter: initial_filter, new_filters: "consonant", append: true, options: {} )
-
1
expect(subject).to receive(:puts).with("filter_and @filter=globalization" )
-
-
1
subject.filter_and new_filters: "consonant"
-
end
-
end
-
end
-
-
1
describe "#filter_refresh" do
-
-
1
context "when current_filter.all_log_filter is true and new_filters is an array" do
-
1
before {
-
1
allow(Deepblue::AndLogFilter).to receive(:new).with( filters: ["pop", "corn"], options: {} ).and_return "cornflakes"
-
}
-
-
1
it "return new_filters" do
-
1
expect(subject.filter_refresh current_filter: MockLogFilter.new( all_log_filter = true ), new_filters: ["pop", "corn"]).to eq "cornflakes"
-
end
-
end
-
-
1
context "when current_filter.all_log_filter is true and new_filters is not an array" do
-
1
it "return new_filters" do
-
1
expect(subject.filter_refresh current_filter: MockLogFilter.new( all_log_filter = true ), new_filters: "brand new").to eq "brand new"
-
end
-
end
-
-
1
context "when current_filter.all_log_filter is false and append is true" do
-
1
it "return new_filters" do
-
1
expect(subject.filter_refresh current_filter: MockLogFilter.new, new_filters: "brand new", append: true).to eq "bran flakes"
-
end
-
end
-
-
1
context "when current_filter.all_log_filter is false and append is false" do
-
1
it "return new_filters" do
-
1
currentFilter = MockLogFilter.new
-
1
newFilter = MockLogFilter.new
-
1
expect(Deepblue::AndLogFilter).to receive(:new).with(filters: [newFilter, currentFilter], options: {1 => "Alpha"})
-
1
expect(subject.filter_refresh current_filter: currentFilter, new_filters: newFilter, append: false, options: {1 => "Alpha"})
-
end
-
end
-
end
-
-
-
1
describe "#filter_or" do
-
1
context "when new_filters is blank" do
-
1
it "returns blank" do
-
-
1
expect(subject.filter_or new_filters: []).to be_blank
-
end
-
end
-
-
1
context "when new_filters is not blank and LogReader initialized with filter that has all_log_filter true" do
-
-
1
it "when current_filter all_log_filter is true" do
-
1
expect(subject.filter_or new_filters: MockLogFilter.new, append: true).to eq initial_filter
-
end
-
end
-
-
1
context "when new_filters is not blank and LogReader initialized with filter that has all_log_filter false" do
-
2
subject { described_class.new(filter: MockLogFilter.new, input: "input" ) }
-
-
1
it "returns current_filter or new_filters when append is true" do
-
1
expect(subject.filter_or new_filters: MockLogFilter.new, append: true).to eq "corn flakes"
-
end
-
end
-
-
1
context "when new_filters is not blank and LogReader initialized with filter that has all_log_filter false and append false" do
-
1
firstFilter = MockLogFilter.new
-
1
secondFilter = MockLogFilter.new
-
2
subject { described_class.new(filter: firstFilter, input: "input" ) }
-
-
1
it "returns " do
-
1
expect(Deepblue::OrLogFilter).to receive(:new).with(filters: [secondFilter, firstFilter], options: {10 => "Omega"})
-
1
subject.filter_or new_filters: secondFilter, append: false, options: {10 => "Omega"}
-
end
-
end
-
end
-
-
1
describe "#input_mode" do
-
1
before {
-
1
allow(subject).to receive(:option).with(key: 'input_mode', default_value: 'r').and_return ("trail mix")
-
}
-
1
it "calls option function" do
-
1
expect(subject).to receive(:option).with(key: 'input_mode', default_value: 'r')
-
-
1
expect(subject.input_mode).to eq "trail mix"
-
end
-
end
-
-
1
describe "#parse_line" do
-
-
1
context "when @current_line is blank" do
-
1
before {
-
1
subject.instance_variable_set(:@current_line, " ")
-
}
-
1
it "returns" do
-
1
expect(Deepblue::ProvenanceHelper).not_to receive(:parse_log_line).with any_args
-
1
subject.parse_line
-
end
-
end
-
-
1
context "when LogParseError is raised" do
-
1
before {
-
1
subject.instance_variable_set(:@current_line, "linear")
-
1
subject.instance_variable_set(:@lines_read, 22)
-
1
allow(Deepblue::ProvenanceHelper).to receive(:parse_log_line).and_raise(Deepblue::LogParseError, "log parse error")
-
}
-
-
1
it "puts LogParseError message" do
-
1
expect(subject).to receive(:puts).with("log parse error")
-
1
subject.parse_line
-
end
-
end
-
-
1
context "when @current_line is not blank" do
-
1
before {
-
1
subject.instance_variable_set(:@current_line, "linear")
-
1
subject.instance_variable_set(:@lines_read, 22)
-
1
subject.instance_variable_set(:@lines_parsed, 0)
-
}
-
-
1
it "calls ProvenanceHelper" do
-
1
expect(Deepblue::ProvenanceHelper).to receive(:parse_log_line).with("linear", line_number: 22, raw_key_values: true)
-
1
subject.parse_line
-
1
subject.instance_variable_get(:@lines_parsed) == 1
-
1
subject.instance_variable_get(:@parsed) == true
-
end
-
end
-
end
-
-
1
describe "#quick_report" do
-
1
before {
-
1
subject.instance_variable_set(:@input_pathname, "strawberries")
-
1
subject.instance_variable_set(:@lines_read, "watermelon")
-
1
subject.instance_variable_set(:@lines_parsed, "yuzu")
-
}
-
-
1
it "outputs text" do
-
1
expect(subject).to receive(:puts).with no_args
-
1
expect(subject).to receive(:puts).with "Quick report"
-
1
expect(subject).to receive(:puts).with "input_pathname: strawberries"
-
1
expect(subject).to receive(:puts).with "lines_read: watermelon"
-
1
expect(subject).to receive(:puts).with "lines_parsed: yuzu"
-
-
1
subject.quick_report
-
end
-
end
-
-
1
pending "#readlines"
-
-
end
-
1
require 'rails_helper'
-
-
1
RSpec.describe Deepblue::LogReporter do
-
5
subject { described_class.new(filter: "filter", input: "input", options: {"blue" => "gold"} ) }
-
-
1
describe "#initialize" do
-
1
it "initializes instance variables" do
-
# called by initialize in parent class
-
1
subject.instance_variable_get(:@filter) == "filter"
-
1
subject.instance_variable_get(:@input) == "input"
-
1
subject.instance_variable_get(:@options) == {"blue" => "gold"}
-
1
subject.instance_variable_get(:@verbose) == false
-
1
subject.instance_variable_get(:@verbose_filter) == false
-
-
1
subject.instance_variable_get(:@output_close) == false
-
1
subject.instance_variable_get(:@output_mode) == 'w'
-
1
subject.instance_variable_get(:@output_pathname).nil? == true
-
end
-
-
1
it "super calls add_date_range_filter" do
-
1
allow(Deepblue::LogReader).to receive(:new)
-
1
Deepblue::LogReporter.new(filter: nil, input:"input", options: {})
-
1
expect(Deepblue::LogReader).to have_received(:new)
-
end
-
end
-
-
1
describe "#report" do
-
1
before {
-
1
allow(subject).to receive(:run)
-
-
1
subject.instance_variable_set(:@timestamp_first, "first!")
-
1
subject.instance_variable_set(:@timestamp_last, "last...")
-
1
allow(subject).to receive(:ids).and_return "1,2,3"
-
1
allow(subject).to receive(:events).and_return "eventually"
-
1
allow(subject).to receive(:class_events).and_return "classified"
-
}
-
-
1
it "puts messages" do
-
1
expect(subject).to receive(:puts).with "timestamp_first = first!"
-
1
expect(subject).to receive(:puts).with "timestamp_last = last..."
-
1
expect(subject).to receive(:puts).with "ids = 1,2,3"
-
1
expect(subject).to receive(:puts).with "events = eventually"
-
1
expect(subject).to receive(:puts).with "class_events = classified"
-
-
1
expect(subject).to receive(:run)
-
-
1
subject.report
-
end
-
end
-
-
1
describe "#run" do
-
1
before {
-
2
allow(subject).to receive(:initialize_report_values)
-
2
allow(subject).to receive(:readlines).and_return( ["line", "timestamp", "event", "event_note", "class_name", "id", "raw_key_values"] )
-
}
-
-
1
it "calls functions" do
-
1
expect(subject).to receive(:initialize_report_values)
-
1
expect(subject).to receive(:readlines).and_return( ["line", "timestamp", "event", "event_note", "class_name", "id", "raw_key_values"] )
-
1
subject.run
-
end
-
-
1
it "calls line_read" do
-
1
skip "Add a test"
-
end
-
end
-
-
end
-
# frozen_string_literal: true
-
-
1
RSpec.describe Deepblue::LoggingCallback do
-
-
6
let( :event_name ) { 'the_event_name' }
-
6
let( :event_line ) { ">>>>> #{event_name} >>>>>" }
-
3
let( :msg ) { 'The message.' }
-
5
let( :user ) { 'user@umich.edu' }
-
-
1
describe '.process_event' do
-
# subject { lambda { |event_name, msg| Deepblue::LoggingCallback.process_event( event_name: event_name, msg: msg ) } }
-
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
described_class.process_event( event_name: event_name, msg: msg )
-
1
expect( Rails.logger ).to have_received( :debug ).with( event_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{msg}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
-
1
describe '.process_event_curation_concern' do
-
2
let( :curation_concern ) { 'DataSet XYZ' }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
described_class.process_event_curation_concern( event_name: event_name, curation_concern: curation_concern, user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( event_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user} >>>>> #{curation_concern}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
-
1
describe '.process_event_file_set' do
-
2
let( :file_set ) { 'FileSet XYZ' }
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
described_class.process_event_file_set( event_name: event_name, file_set: file_set, user: user )
-
1
expect( Rails.logger ).to have_received( :debug ).with( event_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user} >>>>> #{file_set}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
-
1
describe '.process_event_user' do
-
1
context 'with msg' do
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
described_class.process_event_user( event_name: event_name, user: user, msg: msg )
-
1
expect( Rails.logger ).to have_received( :debug ).with( event_line ).exactly( 4 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user} >>>>> #{msg}" )
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 5 ).times
-
end
-
end
-
1
context 'without msg' do
-
1
before do
-
1
allow( Rails.logger ).to receive( :debug ).with( any_args )
-
end
-
1
it do
-
1
described_class.process_event_user( event_name: event_name, user: user, msg: '' )
-
1
described_class.process_event_user( event_name: event_name, user: user, msg: nil )
-
1
expect( Rails.logger ).to have_received( :debug ).with( event_line ).exactly( 8 ).times
-
1
expect( Rails.logger ).to have_received( :debug ).with( "#{event_name} >>>>> #{user}" ).twice
-
1
expect( Rails.logger ).to have_received( :debug ).exactly( 10 ).times
-
end
-
end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
class VirusScanServiceMock
-
1
include ::Deepblue::VirusScanService
-
end
-
-
1
RSpec.describe Deepblue::VirusScanService do
-
8
subject { VirusScanServiceMock.new }
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( Deepblue::VirusScanService::VIRUS_SCAN_ERROR ).to eq 'scan error'
-
1
expect( Deepblue::VirusScanService::VIRUS_SCAN_NOT_VIRUS ).to eq 'not virus'
-
1
expect( Deepblue::VirusScanService::VIRUS_SCAN_SKIPPED ).to eq 'scan skipped'
-
1
expect( Deepblue::VirusScanService::VIRUS_SCAN_SKIPPED_SERVICE_UNAVAILABLE ).to eq 'scan skipped service unavailable'
-
1
expect( Deepblue::VirusScanService::VIRUS_SCAN_SKIPPED_TOO_BIG ).to eq 'scan skipped too big'
-
1
expect( Deepblue::VirusScanService::VIRUS_SCAN_UNKNOWN ).to eq 'scan unknown'
-
1
expect( Deepblue::VirusScanService::VIRUS_SCAN_VIRUS ).to eq 'virus'
-
end
-
end
-
-
1
describe "#virus_scan_detected_virus?" do
-
1
context "when argument equals VIRUS_SCAN_VIRUS constant" do
-
1
it "returns true" do
-
1
expect(subject.virus_scan_detected_virus? scan_result: "virus").to eq true
-
end
-
end
-
-
1
context "when argument does not equal VIRUS_SCAN_VIRUS constant" do
-
1
it "returns false" do
-
1
expect(subject.virus_scan_detected_virus? scan_result: "scanned virus").to eq false
-
end
-
end
-
end
-
-
1
describe "#virus_scan_service_name" do
-
1
it do
-
1
expect(subject.virus_scan_service_name).to eq Hydra::Works.default_system_virus_scanner.name
-
end
-
end
-
-
1
describe "#virus_scan_skipped?" do
-
1
context "if scan_result blank" do
-
1
it "returns false" do
-
1
expect(subject.virus_scan_skipped? scan_result: "").to eq false
-
end
-
end
-
-
1
context "if scan_result does not start with 'scan skipped'" do
-
1
it "returns false" do
-
1
expect(subject.virus_scan_skipped? scan_result: "scanning the surrounding parsecs").to eq false
-
end
-
end
-
-
1
context "if scan_result does start with 'scan skipped'" do
-
1
it "returns true" do
-
1
expect(subject.virus_scan_skipped? scan_result: "scan skipped, yippee ki yi yay").to eq true
-
end
-
end
-
end
-
-
1
describe "#virus_scan_timestamp_now" do
-
1
before {
-
1
allow(Time).to receive(:now).and_return(DateTime.new(2025, 2, 3, 4, 5, 6))
-
}
-
1
it do
-
1
expect(subject.virus_scan_timestamp_now).to eq "2025-02-03 04:05:06"
-
end
-
end
-
-
end
-
1
require 'rails_helper'
-
-
1
class MockOutput
-
1
def puts text
-
text
-
end
-
end
-
-
1
class MockFileSet
-
1
def original_name
-
1
"original name"
-
end
-
-
1
def to_s
-
"file name"
-
end
-
end
-
-
1
RSpec.describe Deepblue::YamlPopulateService do
-
68
subject { described_class.new }
-
-
1
def expected_attribute_names_ignore
-
3
%w[ access_control_id
-
collection_type_gid
-
file_size
-
head
-
part_of tail
-
thumbnail_id ]
-
end
-
-
1
def expected_attribute_names_user_ignore
-
1
%w[ current_sign_in_at
-
current_sign_in_ip
-
reset_password_token
-
reset_password_sent_at ]
-
end
-
-
1
describe 'constants' do
-
1
it do
-
1
expect( Deepblue::YamlPopulateService::DEFAULT_CREATE_ZERO_LENGTH_FILES ).to eq true
-
1
expect( Deepblue::YamlPopulateService::DEFAULT_OVERWRITE_EXPORT_FILES ).to eq true
-
end
-
end
-
-
1
describe "#initialize" do
-
1
it "sets instance variables" do
-
1
instance_variable_get(:@create_zero_length_files) == true
-
1
instance_variable_get(:@mode) == 'build'
-
-
1
instance_variable_get(:@overwrite_export_files) == true
-
1
instance_variable_get(:@source) == 'DBDv2'
-
1
instance_variable_get(:@total_collections_exported) == 0
-
1
instance_variable_get(:@total_file_sets_exported) == 0
-
1
instance_variable_get(:@total_file_sets_size_exported) == 0
-
1
instance_variable_get(:@total_works_exported) == 0
-
1
instance_variable_get(:@total_users_exported) == 0
-
end
-
end
-
-
1
describe "#yaml_body_collections" do
-
1
context do
-
1
concern = OpenStruct.new(id: "XYZ-1000", edit_users: "editors", collection_type: OpenStruct.new(machine_id: "HAL"),
-
work_ids: [101,202,303,404], total_file_size: 203, visibility: "public")
-
-
1
before {
-
1
allow(subject).to receive(:yaml_item).with "out", "concave", ":id:", "XYZ-1000"
-
1
allow(subject).to receive(:source).and_return 'DBDv2'
-
1
allow(subject).to receive(:yaml_item).with "out", "concave", ":collection_type:", "HAL", escape: true
-
1
allow(subject).to receive(:yaml_item).with "out", "concave", ":edit_users:", "editors", escape: true
-
1
allow(subject).to receive(:yaml_item_prior_identifier).with "out", "concave", curation_concern: concern
-
1
allow(subject).to receive(:yaml_item_subject).with "out", "concave", curation_concern: concern
-
-
1
allow(subject).to receive(:yaml_item).with "out", "concave", ":total_work_count:", 4
-
1
allow(subject).to receive(:yaml_item).with "out", "concave", ":total_file_size:", 203
-
-
1
allow(subject).to receive(:human_readable_size).with( 203 ).and_return "203 MB"
-
1
allow(subject).to receive(:yaml_item).with "out", "concave", ":total_file_size_human_readable:", "203 MB", escape: true
-
1
allow(subject).to receive(:yaml_item).with "out", "concave", ":visibility:", "public"
-
-
1
allow(subject).to receive(:attribute_names_collection).and_return %w[ prior_identifier rights rights_license
-
subject subject_discipline total_file_size visionary ]
-
1
allow(subject).to receive(:yaml_item_collection).with "out", "concave", concern, name: "visionary"
-
}
-
-
1
it "calls functions with curation_concern values" do
-
1
instance_variable_get(:@total_collections_exported) == 0
-
-
1
expect(subject).to receive(:yaml_item).with "out", "concave", ":id:", "XYZ-1000"
-
1
expect(subject).to receive(:yaml_item).with "out", "concave", ":collection_type:", "HAL", escape: true
-
1
expect(subject).to receive(:yaml_item).with "out", "concave", ":edit_users:", "editors", escape: true
-
1
expect(subject).to receive(:yaml_item_prior_identifier).with "out", "concave", curation_concern: concern
-
1
expect(subject).to receive(:yaml_item_subject).with "out", "concave", curation_concern: concern
-
-
1
expect(subject).to receive(:yaml_item).with "out", "concave", ":total_work_count:", 4
-
1
expect(subject).to receive(:yaml_item).with "out", "concave", ":total_file_size:", 203
-
1
expect(subject).to receive(:yaml_item).with "out", "concave", ":total_file_size_human_readable:", "203 MB", escape: true
-
1
expect(subject).to receive(:yaml_item).with "out", "concave", ":visibility:", "public"
-
-
1
expect(subject).to receive(:yaml_item_collection).with "out", "concave", concern, name: "visionary"
-
-
1
subject.yaml_body_collections "out", indent: "concave", curation_concern: concern
-
end
-
end
-
end
-
-
1
describe "#yaml_file_size" do
-
1
context "when file_set.file_size is blank and file_set.original_file is nil" do
-
1
it "returns 0" do
-
1
fileset = OpenStruct.new(file_size: "", original_file: nil)
-
1
expect(subject.yaml_file_size fileset).to eq 0
-
end
-
end
-
-
1
context "when file_set.file_size is blank and file_set.original_file is not nil" do
-
1
it "returns file_set.original_file.size" do
-
1
fileset = OpenStruct.new(file_size: "", original_file: OpenStruct.new(size: 80))
-
1
expect(subject.yaml_file_size fileset).to eq 80
-
end
-
end
-
-
1
context "when file_set.file_size is not blank" do
-
1
it "returns file_set.file_size[0]" do
-
1
fileset = OpenStruct.new(file_size: [9], original_file: OpenStruct.new(size: 80))
-
1
expect(subject.yaml_file_size fileset).to eq 9
-
end
-
end
-
end
-
-
1
describe "#yaml_body_files" do
-
1
before {
-
2
allow(subject).to receive(:yaml_line).with("out", "indent", ":file_set_ids:")
-
}
-
-
1
context "curation_concern.file_sets.count is not positive" do
-
1
it "calls yaml_line once" do
-
1
expect(subject).to receive(:yaml_line).with("out", "indent", ":file_set_ids:")
-
-
1
concern = OpenStruct.new(file_sets: [])
-
1
subject.yaml_body_files "out", indent_base: "base ", indent: "indent", curation_concern: concern, target_dirname: "target"
-
end
-
end
-
-
1
context "curation_concern.file_sets.count is positive" do
-
1
fileset1 = OpenStruct.new(id: 1, title: ["Why we are No. 1"], edit_users: "admin users", mime_type: "text/plain", original_checksum: ["beauty","beast"],
-
original_file: OpenStruct.new(original_name: "When we were No. 2"), visibility: "public")
-
1
concern = OpenStruct.new(file_sets: [fileset1])
-
# TODO: put YamlPopulateService.rb edits in earlier commit
-
#
-
-
1
before {
-
1
allow(subject).to receive(:yaml_item).with("out", "baseindent-", "", 1, escape: true)
-
1
allow(subject).to receive(:mode).and_return "migrate"
-
1
allow(subject).to receive(:log_provenance_migrate).with( curation_concern: fileset1, parent: concern )
-
1
allow(subject).to receive(:yaml_file_set_id).with(fileset1).and_return "file id 1"
-
1
allow(subject).to receive(:yaml_line).with("out", "indent", ":file id 1:")
-
1
allow(subject).to receive(:yaml_item).with("out", "baseindent", ":id:", 1, escape: true)
-
1
allow(subject).to receive(:yaml_item).with("out", "baseindent", ':title:', ["Why we are No. 1"], escape: true, single_value: true )
-
1
allow(subject).to receive(:yaml_item_prior_identifier).with("out", "baseindent", curation_concern: fileset1 )
-
1
allow(subject).to receive(:yaml_export_file_path).with(target_dirname: "target", file_set: fileset1 ).and_return "filepath"
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ':file_path:', "filepath", escape: true )
-
1
allow(subject).to receive(:yaml_file_set_checksum).with( file_set: fileset1 ).and_return OpenStruct.new(algorithm: "11", value: "111*")
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":checksum_algorithm:", "11", escape: true )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":checksum_value:", "111*", escape: true )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":edit_users:", "admin users", escape: true )
-
1
allow(subject).to receive(:yaml_file_size).with( fileset1 ).and_return "76"
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":file_size:", "76" )
-
1
allow(subject).to receive(:human_readable_size).with( "76" ).and_return "76 kb"
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":file_size_human_readable:", "76 kb", escape: true )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":mime_type:", "text/plain", escape: true )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":original_checksum:", "beauty" )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":original_name:", "When we were No. 2", escape: true )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent", ":visibility:", "public" )
-
-
1
allow(subject).to receive(:attribute_names_file_set).and_return %w[ title file_size irascible ]
-
1
allow(subject).to receive(:yaml_item_file_set).with( "out", "baseindent", fileset1, name: "irascible" )
-
}
-
1
it "calls yaml_item for each file set in curation_concern" do
-
1
expect(subject).to receive(:yaml_line).with("out", "indent", ":file_set_ids:")
-
1
expect(subject).to receive(:yaml_item).with("out", "baseindent-", "", 1, escape: true)
-
-
1
expect(subject).to receive(:log_provenance_migrate).with( curation_concern: fileset1, parent: concern )
-
1
expect(subject).to receive(:yaml_line).with("out", "indent", ":file id 1:")
-
1
expect(subject).to receive(:yaml_item).with("out", "baseindent", ":id:", 1, escape: true)
-
1
expect(subject).to receive(:yaml_item).with("out", "baseindent", ':title:', ["Why we are No. 1"], escape: true, single_value: true )
-
1
expect(subject).to receive(:yaml_item_prior_identifier).with("out", "baseindent", curation_concern: fileset1 )
-
1
expect(subject).to receive(:yaml_export_file_path).with(target_dirname: "target", file_set: fileset1 )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ':file_path:', "filepath", escape: true )
-
1
expect(subject).to receive(:yaml_file_set_checksum).with( file_set: fileset1 )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":checksum_algorithm:", "11", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":checksum_value:", "111*", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":edit_users:", "admin users", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":file_size:", "76" )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":file_size_human_readable:", "76 kb", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":mime_type:", "text/plain", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":original_checksum:", "beauty" )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":original_name:", "When we were No. 2", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "baseindent", ":visibility:", "public" )
-
1
expect(subject).to receive(:yaml_item_file_set).with( "out", "baseindent", fileset1, name: "irascible" )
-
-
1
subject.yaml_body_files "out", indent_base: "base", indent: "indent", curation_concern: concern, target_dirname: "target"
-
-
1
subject.instance_variable_get(:@total_file_sets_exported) == 1
-
1
subject.instance_variable_get(:@total_file_sets_size_exported) == 76
-
end
-
end
-
end
-
-
1
describe "#yaml_body_user_body" do
-
1
user = OpenStruct.new(email: 'email z')
-
1
before {
-
1
allow(subject).to receive(:yaml_user_email).with(user).and_return "user_email_z"
-
1
allow(subject).to receive(:yaml_line).with( "out", "indent", ":user_email_z:")
-
1
allow(subject).to receive(:yaml_item).with( "out", "base indent", ":email:", "email z", escape: true)
-
1
allow(subject).to receive(:attribute_names_user).and_return ["snail mail", "email"]
-
1
allow(subject).to receive(:yaml_item_user).with("out", "base indent", user, name: "snail mail")
-
}
-
-
1
it "calls various functions" do
-
1
expect(subject).to receive(:yaml_user_email).with(user).and_return "user_email_z"
-
1
expect(subject).to receive(:yaml_line).with( "out", "indent", ":user_email_z:")
-
1
expect(subject).to receive(:yaml_item).with( "out", "base indent", ":email:", "email z", escape: true)
-
1
expect(subject).to receive(:attribute_names_user).and_return ["snail mail", "email"]
-
1
expect(subject).to receive(:yaml_item_user).with("out", "base indent", user, name: "snail mail")
-
-
1
subject.yaml_body_user_body "out", indent_base: "base ", indent: "indent", user: user
-
-
1
subject.instance_variable_get(:@total_users_exported) == 1
-
end
-
end
-
-
1
describe "#yaml_body_users" do
-
1
before {
-
2
allow(subject).to receive(:yaml_line).with( "out", "indent", ':user_emails:' )
-
}
-
-
1
context "has users" do
-
1
before {
-
1
allow(subject).to receive(:yaml_item).with( "out", "indent", ":total_user_count:", 3 )
-
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent-", "", "email1", escape: true )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent-", "", "email2", escape: true )
-
1
allow(subject).to receive(:yaml_item).with( "out", "baseindent-", "", "email3", escape: true )
-
}
-
1
it "calls yaml_item, yaml_line, and yaml_item again for each user" do
-
1
users = [OpenStruct.new(email: 'email1'), OpenStruct.new(email: 'email2'), OpenStruct.new(email: 'email3')]
-
1
subject.yaml_body_users "out", indent_base: "base", indent: "indent", users: users
-
end
-
end
-
-
1
context "has no users" do
-
1
before {
-
1
allow(subject).to receive(:yaml_item).with( "out", "indent", ":total_user_count:", 0 )
-
}
-
1
it "calls yaml_item and yaml_line" do
-
1
subject.yaml_body_users "out", indent_base: "base", indent: "indent", users: []
-
end
-
end
-
end
-
-
1
describe "#yaml_body_works" do
-
1
context "when result of attribute_names_work is in skip array" do
-
1
concern = OpenStruct.new(id: "id", admin_set_id: "admin set id", edit_users: "edit users", file_set_ids: ["id1", "id2"],
-
total_file_size: 2, visibility: "promotional")
-
1
before {
-
2
allow(subject).to receive(:human_readable_size).with(2).and_return "a goodly number of 2"
-
2
allow(subject).to receive(:attribute_names_work).and_return %w[ prior_identifier rights rights_license subject subject_discipline total_file_size ]
-
-
2
allow(subject).to receive(:yaml_item).with( "out", "indent", ":id:", "id")
-
2
allow(subject).to receive(:yaml_item).with( "out", "indent", ":admin_set_id:", "admin set id", escape: true )
-
2
allow(subject).to receive(:yaml_item).with( "out", "indent", ":edit_users:", "edit users", escape: true )
-
-
2
allow(subject).to receive(:yaml_item_prior_identifier).with( "out", "indent", curation_concern: concern )
-
2
allow(subject).to receive(:yaml_item_rights).with( "out", "indent", curation_concern: concern )
-
2
allow(subject).to receive(:yaml_item_subject).with( "out", "indent", curation_concern: concern )
-
-
2
allow(subject).to receive(:yaml_item).with( "out", "indent", ":total_file_count:", 2 )
-
2
allow(subject).to receive(:yaml_item).with( "out", "indent", ":total_file_size:", 2 )
-
2
allow(subject).to receive(:yaml_item).with( "out", "indent", ":total_file_size_human_readable:", "a goodly number of 2", escape: true )
-
2
allow(subject).to receive(:yaml_item).with( "out", "indent", ":visibility:", "promotional")
-
}
-
1
it "skips yaml_item_work" do
-
1
expect(subject).to receive(:yaml_item).with( "out", "indent", ":id:", "id")
-
1
expect(subject).to receive(:yaml_item).with( "out", "indent", ":admin_set_id:", "admin set id", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "indent", ":edit_users:", "edit users", escape: true )
-
-
1
expect(subject).to receive(:yaml_item_prior_identifier).with( "out", "indent", curation_concern: concern )
-
1
expect(subject).to receive(:yaml_item_rights).with( "out", "indent", curation_concern: concern )
-
1
expect(subject).to receive(:yaml_item_subject).with( "out", "indent", curation_concern: concern )
-
-
1
expect(subject).to receive(:yaml_item).with( "out", "indent", ":total_file_count:", 2 )
-
1
expect(subject).to receive(:yaml_item).with( "out", "indent", ":total_file_size:", 2 )
-
1
expect(subject).to receive(:yaml_item).with( "out", "indent", ":total_file_size_human_readable:", "a goodly number of 2", escape: true )
-
1
expect(subject).to receive(:yaml_item).with( "out", "indent", ":visibility:", "promotional")
-
-
1
subject.yaml_body_works "out", indent: "indent", curation_concern: concern
-
-
1
subject.instance_variable_get(:@total_works_exported) == 1
-
end
-
-
1
context "when result of attribute_names_work is not in skip array" do
-
1
before {
-
1
allow(subject).to receive(:attribute_names_work).and_return ["unidentifiable"]
-
1
allow(subject).to receive(:yaml_item_work).with( "out", "indent", concern, name: "unidentifiable")
-
}
-
-
1
it "calls yaml_item_work" do
-
1
expect(subject).to receive(:yaml_item_work).with( "out", "indent", concern, name: "unidentifiable")
-
-
1
subject.yaml_body_works "out", indent: "indent", curation_concern: concern
-
end
-
end
-
end
-
end
-
-
1
describe "#yaml_escape_value" do
-
1
context "when value argument is nil" do
-
1
it "returns blank" do
-
1
expect(subject.yaml_escape_value nil).to be_blank
-
end
-
end
-
-
1
context "when value argument is not nil and escape is false" do
-
1
it "returns value argument" do
-
1
expect(subject.yaml_escape_value "valuable").to eq "valuable"
-
end
-
end
-
-
1
context "when value argument is not nil and comment and escape are true" do
-
1
it "returns value argument" do
-
1
expect(subject.yaml_escape_value "valuable", comment: true, escape: true).to eq "valuable"
-
end
-
end
-
-
1
context "when value argument is not nil and escape is true and comment is false" do
-
1
it "returns value argument as json" do
-
1
expect(subject.yaml_escape_value "valuable", comment: false, escape: true).to eq "\"valuable\""
-
end
-
end
-
-
1
context "when value argument is blank and escape is true and comment is false" do
-
1
it "returns blank" do
-
1
expect(subject.yaml_escape_value "", comment: false, escape: true).to be_blank
-
end
-
end
-
end
-
-
1
describe "#yaml_export_file_path" do
-
1
file_set = OpenStruct.new(id: 'file set id')
-
1
before {
-
1
allow(subject).to receive(:yaml_export_file_name).with(file_set: file_set).and_return "export file name "
-
}
-
1
it "returns string" do
-
1
expect(subject.yaml_export_file_path target_dirname: ["dirname1 ", "dirname2 "], file_set: file_set)
-
.to eq "dirname1 file set id_export file name dirname2 "
-
end
-
end
-
-
# NOTE: if/else in function doesn't make a difference
-
1
describe "#yaml_export_file_name" do
-
1
fileset_arg = OpenStruct.new(title: ["*file^", "set"])
-
1
context "when file is nil" do
-
1
before {
-
1
allow(Deepblue::MetadataHelper).to receive(:file_from_file_set).with(fileset_arg).and_return nil
-
}
-
1
it "returns filename appropriate string" do
-
1
expect(subject.yaml_export_file_name file_set: fileset_arg).to eq "_file_"
-
end
-
end
-
-
1
context "when file is not nil" do
-
1
before {
-
1
allow(Deepblue::MetadataHelper).to receive(:file_from_file_set).with(fileset_arg).and_return MockFileSet.new
-
}
-
1
it "returns filename appropriate string" do
-
1
expect(subject.yaml_export_file_name file_set: fileset_arg).to eq "_file_"
-
end
-
end
-
end
-
-
1
describe "#yaml_file_set_checksum" do
-
1
context "when file present" do
-
1
before {
-
1
allow(Deepblue::MetadataHelper).to receive(:file_from_file_set).with("fileset").and_return OpenStruct.new(checksum: 'check sum')
-
}
-
1
it "returns file.checksum" do
-
1
expect(subject.yaml_file_set_checksum file_set: "fileset").to eq "check sum"
-
end
-
end
-
-
1
context "when file not present" do
-
1
before {
-
1
allow(Deepblue::MetadataHelper).to receive(:file_from_file_set).with("fileset").and_return nil
-
}
-
1
it "returns nil" do
-
1
expect(subject.yaml_file_set_checksum file_set: "fileset").to be_blank
-
end
-
end
-
end
-
-
1
pending "#yaml_filename"
-
-
1
describe "#yaml_filename_collection" do
-
1
before {
-
1
allow(subject).to receive(:yaml_filename).with pathname_dir: "pathname", id: "ID", prefix: 'c_', task: "populate"
-
}
-
1
it "calls yaml_filename" do
-
1
expect(subject).to receive(:yaml_filename).with pathname_dir: "pathname", id: "ID", prefix: 'c_', task: "populate"
-
-
1
subject.yaml_filename_collection pathname_dir: "pathname", collection: OpenStruct.new(id: 'ID'), task: 'populate'
-
end
-
end
-
-
1
describe "#yaml_filename_users" do
-
1
before {
-
1
allow(subject).to receive(:yaml_filename).with pathname_dir: "pathname", id: "", prefix: 'users', task: "populate"
-
}
-
1
it "calls yaml_filename" do
-
1
expect(subject).to receive(:yaml_filename).with pathname_dir: "pathname", id: "", prefix: 'users', task: "populate"
-
-
1
subject.yaml_filename_users pathname_dir: "pathname", task: 'populate'
-
end
-
end
-
-
1
describe "#yaml_filename_work" do
-
1
before {
-
1
allow(subject).to receive(:yaml_filename).with pathname_dir: "pathname", id: "identify", prefix: 'w_', task: "populate"
-
}
-
1
it "calls yaml_filename" do
-
1
expect(subject).to receive(:yaml_filename).with pathname_dir: "pathname", id: "identify", prefix: 'w_', task: "populate"
-
-
1
subject.yaml_filename_work pathname_dir: "pathname", work: OpenStruct.new(id: 'identify'), task: 'populate'
-
end
-
end
-
-
1
describe "#yaml_header" do
-
1
before {
-
1
allow(DateTime).to receive(:now).and_return DateTime.new(2025, 6, 6, 9, 10, 35)
-
1
allow(subject).to receive(:source).and_return "DBDv3"
-
1
allow(subject).to receive(:mode).and_return "modular"
-
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':email:', "depositor"
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':visibility:', "visible"
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':ingester:', ''
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':source:', "DBDv3"
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':export_timestamp:', '2025-06-06T09:10:35+00:00'
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':mode:', "modular"
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':id:', "identifier"
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", "header"
-
}
-
1
it "calls yaml_line multiple times with different arguments" do
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':email:', "depositor"
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':visibility:', "visible"
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':ingester:', ''
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':source:', "DBDv3"
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':export_timestamp:', '2025-06-06T09:10:35+00:00'
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':mode:', "modular"
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':id:', "identifier"
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", "header"
-
-
1
concern = OpenStruct.new(depositor: 'depositor', visibility: "visible", id: "identifier")
-
1
subject.yaml_header "out", indent: "indent", curation_concern: concern, header_type: "header"
-
end
-
end
-
-
1
describe "#yaml_header_populate" do
-
1
before {
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", 'target', comment: true
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", "bundle exec rake umrdr:populate[target]", comment: true
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", "---"
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ":user:"
-
}
-
1
it "calls yaml_line multiple times with different arguments" do
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", 'target', comment: true
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", "bundle exec rake umrdr:populate[target]", comment: true
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", "---"
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ":user:"
-
-
1
subject.yaml_header_populate "out", indent: "indent", target_filename: "target"
-
end
-
end
-
-
1
describe "#yaml_header_users" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv3"
-
1
allow(subject).to receive(:mode).and_return "modular"
-
1
allow(DateTime).to receive(:now).and_return DateTime.new(2025, 6, 6, 0, 0, 30)
-
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':ingester:', ''
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':source:', "DBDv3"
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':export_timestamp:', '30'
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':mode:', 'modular'
-
1
allow(subject).to receive(:yaml_line).with "out", "indent", ':users:'
-
}
-
1
it "calls yaml_line multiple times with different arguments" do
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':ingester:', ''
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':source:', "DBDv3"
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':export_timestamp:', '2025-06-06T00:00:30+00:00'
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':mode:', 'modular'
-
1
expect(subject).to receive(:yaml_line).with "out", "indent", ':users:'
-
-
1
subject.yaml_header_users "out", indent: "indent"
-
end
-
end
-
-
1
describe "#yaml_is_a_work?" do
-
1
context "when source is 'DBDv2'" do
-
1
before {
-
2
allow(subject).to receive(:source).and_return "DBDv2"
-
}
-
-
1
it "returns true when curation_concern is a DataSet" do
-
1
expect(subject.yaml_is_a_work? curation_concern: DataSet.new).to eq true
-
end
-
-
1
it "returns false when curation_concern is not a DataSet" do
-
1
expect(subject.yaml_is_a_work? curation_concern: GenericWork.new).to eq false
-
end
-
end
-
-
1
context "when source is not 'DBDv2'" do
-
1
before {
-
2
allow(subject).to receive(:source).and_return "DBDv1"
-
}
-
-
1
it "returns true when curation_concern is a GenericWork" do
-
1
expect(subject.yaml_is_a_work? curation_concern: GenericWork.new).to eq true
-
end
-
-
1
it "returns false when curation_concern is not a GenericWork" do
-
1
expect(subject.yaml_is_a_work? curation_concern: DataSet.new).to eq false
-
end
-
end
-
end
-
-
-
1
pending "#yaml_item"
-
-
1
describe "#yaml_item_collection" do
-
1
context "when ATTRIBUTE_NAMES_IGNORE includes name argument" do
-
1
it "returns blank" do
-
1
expected_attribute_names_ignore.each do |attr_name|
-
7
expect(subject.yaml_item_collection "out", "indent", "curation concern", name: attr_name).to be_blank
-
end
-
end
-
end
-
-
1
context "when ATTRIBUTE_NAMES_IGNORE doesn't include name argument" do
-
1
it "calls yaml_item" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":banana:", "yellow", escape: true
-
1
subject.yaml_item_collection "out", "indent", { "banana" => "yellow"}, name: "banana"
-
end
-
end
-
-
1
context "when value blank and name not in ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC" do
-
1
it "returns blank" do
-
1
expect(subject.yaml_item_collection "out", "indent", { "banana" => "" }, name: "banana").to be_blank
-
end
-
end
-
end
-
-
1
describe "#yaml_item_file_set" do
-
1
context "when ATTRIBUTE_NAMES_IGNORE includes name argument" do
-
1
it "returns blank" do
-
1
expected_attribute_names_ignore.each do |attr_name|
-
7
expect(subject.yaml_item_file_set "out", "indent", "file set", name: attr_name).to be_blank
-
end
-
end
-
end
-
-
1
context "when ATTRIBUTE_NAMES_IGNORE doesn't include name argument" do
-
1
it "calls yaml_item" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":apple:", "red", escape: true
-
1
subject.yaml_item_file_set "out", "indent", { "apple" => "red"}, name: "apple"
-
end
-
end
-
-
1
context "when value blank and name not in ATTRIBUTE_NAMES_ALWAYS_INCLUDE_FILE_SET" do
-
1
it "returns blank" do
-
1
expect(subject.yaml_item_file_set "out", "indent", { "apple" => "" }, name: "apple").to be_blank
-
end
-
end
-
end
-
-
1
describe "#yaml_item_prior_identifier" do
-
1
context "when source is 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv1"
-
}
-
1
it "calls yaml_item without curation_concern.prior_identifier" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":prior_identifier:", ""
-
1
subject.yaml_item_prior_identifier "out", "indent", curation_concern: "concern"
-
end
-
end
-
-
1
context "when source is not 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv2"
-
}
-
1
it "calls yaml_item with curation_concern.prior_identifier" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":prior_identifier:", "concern identifier"
-
1
subject.yaml_item_prior_identifier "out", "indent", curation_concern: OpenStruct.new(prior_identifier: 'concern identifier')
-
end
-
end
-
end
-
-
1
describe "#yaml_item_referenced_by" do
-
1
context "when source is 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv1"
-
}
-
1
it "calls yaml_item with subject" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":isReferencedBy:", "reference concern", escape: true
-
1
subject.yaml_item_referenced_by "out", "indent", curation_concern: OpenStruct.new(isReferencedBy: 'reference concern')
-
end
-
end
-
-
1
context "when source is not 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv2"
-
}
-
1
it "calls yaml_item with subject_discipline" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":referenced_by:", "referral concern", escape: true
-
1
subject.yaml_item_referenced_by "out", "indent", curation_concern: OpenStruct.new(referenced_by: 'referral concern')
-
end
-
end
-
end
-
-
1
describe "#yaml_item_rights" do
-
1
context "when source is 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv1"
-
}
-
1
it "calls yaml_item with subject" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":rights:", "rights concern", escape: true
-
1
subject.yaml_item_rights "out", "indent", curation_concern: OpenStruct.new(rights: 'rights concern')
-
end
-
end
-
-
1
context "when source is not 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv2"
-
}
-
1
it "calls yaml_item with subject_discipline" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":rights_license:", "license", escape: true
-
1
subject.yaml_item_rights "out", "indent", curation_concern: OpenStruct.new(rights_license: 'license')
-
end
-
end
-
end
-
-
1
describe "#yaml_item_subject" do
-
1
context "when source is 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv1"
-
}
-
1
it "calls yaml_item with subject" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":subject:", "subject concern", escape: true
-
1
subject.yaml_item_subject "out", "indent", curation_concern: OpenStruct.new(subject: 'subject concern')
-
end
-
end
-
-
1
context "when source is not 'DBDv1'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv2"
-
}
-
1
it "calls yaml_item with subject_discipline" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":subject_discipline:", "discipline", escape: true
-
1
subject.yaml_item_subject "out", "indent", curation_concern: OpenStruct.new(subject_discipline: 'discipline')
-
end
-
end
-
end
-
-
1
describe "#yaml_item_user" do
-
1
context "when ATTRIBUTE_NAMES_USER_IGNORE includes name argument" do
-
1
it "returns blank" do
-
1
expected_attribute_names_user_ignore.each do |attr_name|
-
4
expect(subject.yaml_item_user "out", "indent", "user", name: attr_name).to be_blank
-
end
-
end
-
end
-
-
1
context "when ATTRIBUTE_NAMES_USER_IGNORE doesn't include name argument" do
-
1
it "calls yaml_item" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":frosting:", "cream cheese", escape: true
-
1
subject.yaml_item_user "out", "indent", { "frosting" => "cream cheese"}, name: "frosting"
-
end
-
end
-
-
1
context "when value blank and name not in ATTRIBUTE_NAMES_ALWAYS_INCLUDE_USER" do
-
1
it "returns blank" do
-
1
expect(subject.yaml_item_user "out", "indent", { "frosting" => "" }, name: "frosting").to be_blank
-
end
-
end
-
end
-
-
1
describe "#yaml_item_work" do
-
1
context "when ATTRIBUTE_NAMES_IGNORE includes name argument" do
-
1
it "returns blank" do
-
1
expected_attribute_names_ignore.each do |attr_name|
-
7
expect(subject.yaml_item_work "out", "indent", "curation concern", name: attr_name).to be_blank
-
end
-
end
-
end
-
-
1
context "when ATTRIBUTE_NAMES_IGNORE doesn't include name argument" do
-
1
it "calls yaml_item" do
-
1
expect(subject).to receive(:yaml_item).with "out", "indent", ":cake:", "chocolate", escape: true
-
1
subject.yaml_item_work "out", "indent", { "cake" => "chocolate" }, name: "cake"
-
end
-
end
-
-
1
context "when value blank and name not in ATTRIBUTE_NAMES_ALWAYS_INCLUDE_CC" do
-
1
it "returns blank" do
-
1
expect(subject.yaml_item_work "out", "indent", { "cake" => "" }, name: "cake").to be_blank
-
end
-
end
-
end
-
-
1
describe "#yaml_line" do
-
1
context "when comment is false" do
-
1
before {
-
1
allow(subject).to receive(:yaml_escape_value).with('', comment: false, escape: false).and_return "yaml escape value"
-
}
-
1
it "outputs text" do
-
1
out_object = MockOutput.new
-
1
expect(out_object).to receive(:puts).with("indent label yaml escape value")
-
1
subject.yaml_line out_object, "indent ", "label", comment: false
-
end
-
end
-
-
1
context "when comment is true" do
-
1
before {
-
1
allow(subject).to receive(:yaml_escape_value).with('', comment: true, escape: false).and_return "comment"
-
}
-
1
it "outputs text with preceding hashtag" do
-
1
out_object = MockOutput.new
-
1
expect(out_object).to receive(:puts).with("# indent label comment")
-
1
subject.yaml_line out_object, "indent ", "label", comment: true
-
end
-
end
-
end
-
-
1
describe "#yaml_populate_collection" do
-
1
object1 = OpenStruct.new(id: 111)
-
1
object2 = OpenStruct.new(id: 222)
-
1
concern_objects = OpenStruct.new(member_objects: [object1, object2])
-
1
concern_empty = OpenStruct.new(member_objects: [])
-
-
1
context "when out argument is not nil" do
-
1
before {
-
3
allow(subject).to receive(:log_provenance_migrate).with( curation_concern: concern_objects )
-
3
allow(subject).to receive(:yaml_header_populate).with( "outboard", indent: "", target_filename: "filename" )
-
3
allow(subject).to receive(:yaml_header).with( "outboard", indent: " ", curation_concern: concern_objects,
-
header_type: ':collections:')
-
3
allow(subject).to receive(:yaml_body_collections).with( "outboard", indent: " ", curation_concern: concern_objects)
-
}
-
-
1
context "when populate_works is false" do
-
1
before {
-
1
allow(subject).to receive(:mode).and_return "work"
-
}
-
1
it "calls various methods" do
-
1
expect(subject).not_to receive(:log_provenance_migrate)
-
1
expect(subject).to receive(:yaml_header_populate).with( "outboard", indent: "", target_filename: "filename" )
-
1
expect(subject).to receive(:yaml_header).with( "outboard", indent: " ", curation_concern: concern_objects,
-
header_type: ':collections:')
-
1
expect(subject).to receive(:yaml_body_collections).with( "outboard", indent: " ", curation_concern: concern_objects)
-
-
1
subject.yaml_populate_collection collection: concern_objects, out: "outboard", populate_works: false, target_filename: "filename"
-
end
-
end
-
-
1
context "when populate_works is true and collection.member_objects has no values" do
-
1
before {
-
1
allow(subject).to receive(:mode).and_return "work"
-
1
allow(subject).to receive(:log_provenance_migrate).with( curation_concern: concern_empty )
-
1
allow(subject).to receive(:yaml_header).with( "outboard", indent: " ", curation_concern: concern_empty,
-
header_type: ':collections:')
-
1
allow(subject).to receive(:yaml_body_collections).with( "outboard", indent: " ", curation_concern: concern_empty)
-
}
-
1
it "calls various methods" do
-
1
expect(subject).not_to receive(:log_provenance_migrate)
-
1
expect(subject).to receive(:yaml_header_populate).with( "outboard", indent: "", target_filename: "filename" )
-
1
expect(subject).to receive(:yaml_header).with( "outboard", indent: " ", curation_concern: concern_empty,
-
header_type: ':collections:')
-
1
expect(subject).to receive(:yaml_body_collections).with( "outboard", indent: " ", curation_concern: concern_empty)
-
-
1
subject.yaml_populate_collection collection: concern_empty, out: "outboard", populate_works: true, target_filename: "filename"
-
end
-
end
-
-
1
context "when populate_works is true and collection.member_objects has value(s) and mode is MetadataHelper::MODE_MIGRATE" do
-
1
before {
-
1
allow(subject).to receive(:mode).and_return "migrate"
-
-
1
allow(subject).to receive(:yaml_line).with( "outboard", " ", ':works:' )
-
1
allow(subject).to receive(:yaml_is_a_work?).with( curation_concern: object1 ).and_return true
-
1
allow(subject).to receive(:yaml_is_a_work?).with( curation_concern: object2 ).and_return false
-
1
allow(subject).to receive(:yaml_item).with( "outboard", " -", '', 111, escape: true )
-
1
allow(subject).to receive(:yaml_line).with( "outboard", " ", ":works_111:" )
-
1
allow(subject).to receive(:log_provenance_migrate).with( curation_concern: object1, parent: concern_objects )
-
1
allow(subject).to receive(:yaml_body_works).with( "outboard", indent: " ", curation_concern: object1 )
-
1
allow(subject).to receive(:yaml_body_files).with( "outboard", indent_base: " ", indent: " ",
-
curation_concern: object1, target_dirname: "directoryname")
-
}
-
1
it "calls various methods" do
-
1
expect(subject).to receive(:log_provenance_migrate).with( curation_concern: concern_objects )
-
1
expect(subject).to receive(:yaml_header_populate).with( "outboard", indent: "", target_filename: "filename" )
-
1
expect(subject).to receive(:yaml_header).with( "outboard", indent: " ", curation_concern: concern_objects,
-
header_type: ':collections:')
-
1
expect(subject).to receive(:yaml_body_collections).with( "outboard", indent: " ", curation_concern: concern_objects)
-
# expect(subject).to receive(:yaml_line).with( "outboard", " ", ':works:' )
-
-
1
expect(subject).to receive(:yaml_item).with( "outboard", " -", '', 111, escape: true )
-
1
expect(subject).not_to receive(:yaml_item).with( "outboard", " -", '', 222, escape: true )
-
1
expect(subject).to receive(:yaml_line).with( "outboard", " ", ":works_111:" )
-
1
expect(subject).not_to receive(:yaml_line).with( "outboard", " ", ":works_222:" )
-
1
expect(subject).to receive(:log_provenance_migrate).with( curation_concern: object1, parent: concern_objects )
-
1
expect(subject).not_to receive(:log_provenance_migrate).with( curation_concern: object2, parent: concern_objects )
-
-
1
expect(subject).to receive(:yaml_body_works).with( "outboard", indent: " ", curation_concern: object1 )
-
# expect(subject).to receive(:yaml_body_files).with( "outboard", indent_base: " ", indent: " ",
-
# curation_concern: object1, target_dirname: "directoryname")
-
1
expect(subject).not_to receive(:yaml_body_works).with( "outboard", indent: " ", curation_concern: object2 )
-
1
expect(subject).not_to receive(:yaml_body_files).with( "outboard", indent_base: " ", indent: " ",
-
curation_concern: object2, target_dirname: "directoryname")
-
-
1
subject.yaml_populate_collection collection: concern_objects, out: "outboard", populate_works: true, target_filename: "filename",
-
target_dirname: "directoryname"
-
end
-
end
-
end
-
-
1
context "when out argument is nil" do
-
1
skip "Add tests"
-
end
-
end
-
-
1
describe "#yaml_populate_stats" do
-
1
before {
-
1
allow(subject).to receive(:human_readable_size).with(0).and_return 100
-
}
-
1
it "returns Hash" do
-
1
expected_hash = {:total_collections_exported => 0,
-
:total_works_exported => 0,
-
:total_file_sets_exported => 0,
-
:total_file_sets_size_exported => 0,
-
:total_file_sets_size_readable_exported => 100,
-
:total_users_exported => 0 }
-
1
expect(subject.yaml_populate_stats).to eq expected_hash
-
end
-
end
-
-
1
describe "#yaml_populate_users" do
-
1
context "when out argument is not nil" do
-
1
before {
-
1
allow(Dir).to receive(:mkdir).with(anything)
-
1
allow(subject).to receive(:yaml_header_populate).with( "outside", indent: "", rake_task: 'umrdr:populate_users', target_filename: "filename" )
-
1
allow(subject).to receive(:yaml_header_users).with( "outside", indent: " ")
-
1
allow(User).to receive(:all).and_return ["user1"]
-
1
allow(subject).to receive(:yaml_body_users).with( "outside",
-
indent_base: " ",
-
indent: " ",
-
users: ["user1"])
-
1
allow(subject).to receive(:yaml_body_user_body).with( "outside",
-
indent_base: " ",
-
indent: " ",
-
user:"user1")
-
}
-
1
it "returns nil" do
-
1
expect(subject).to receive(:yaml_header_populate).with( "outside", indent: "", rake_task: 'umrdr:populate_users', target_filename: "filename" )
-
1
expect(subject).to receive(:yaml_header_users).with( "outside", indent: " ")
-
1
expect(subject).to receive(:yaml_body_users).with( "outside",
-
indent_base: " ",
-
indent: " ",
-
users: ["user1"])
-
1
expect(subject).to receive(:yaml_body_user_body).with( "outside",
-
indent_base: " ",
-
indent: " ",
-
user:"user1")
-
-
1
expect(subject.yaml_populate_users out: "outside", target_filename: "filename").to be_blank
-
end
-
end
-
-
1
context "when out argument is nil" do
-
1
skip "Add tests"
-
end
-
end
-
-
1
describe "#yaml_populate_work" do
-
1
context "when out argument is not nil" do
-
1
before {
-
2
allow(subject).to receive(:log_provenance_migrate).with( curation_concern: "concern" )
-
2
allow(subject).to receive(:yaml_header_populate).with( "I'm going out", indent: "", target_filename: "filename" )
-
2
allow(subject).to receive(:yaml_header).with( "I'm going out",
-
indent: " ",
-
curation_concern: "concern",
-
header_type: ':works:' )
-
2
allow(subject).to receive(:yaml_body_works).with( "I'm going out", indent: " ", curation_concern: "concern" )
-
2
allow(subject).to receive(:yaml_body_files).with( "I'm going out",
-
indent_base: " ",
-
indent: " ",
-
curation_concern: "concern",
-
target_dirname: "dirname" )
-
}
-
1
context "when mode is MetadataHelper::MODE_MIGRATE" do
-
1
before {
-
1
allow(subject).to receive(:mode).and_return "migrate"
-
}
-
1
it 'calls log_provenance_migrate, other functions and returns nil' do
-
1
expect(subject).to receive(:log_provenance_migrate).with( curation_concern: "concern" )
-
1
expect(subject).to receive(:yaml_header_populate).with( "I'm going out", indent: "", target_filename: "filename" )
-
1
expect(subject).to receive(:yaml_header).with( "I'm going out",
-
indent: " ",
-
curation_concern: "concern",
-
header_type: ':works:' )
-
1
expect(subject).to receive(:yaml_body_works).with( "I'm going out", indent: " ", curation_concern: "concern" )
-
1
expect(subject).to receive(:yaml_body_files).with( "I'm going out",
-
indent_base: " ",
-
indent: " ",
-
curation_concern: "concern",
-
target_dirname: "dirname" )
-
-
1
expect(subject.yaml_populate_work(curation_concern: "concern", out: "I'm going out", target_filename: "filename",
-
target_dirname: "dirname")).to be_blank
-
end
-
end
-
1
context "when mode is not MetadataHelper::MODE_MIGRATE" do
-
1
before {
-
1
allow(subject).to receive(:mode).and_return "work"
-
}
-
1
it 'does not call log_provenance_migrate, calls various functions and returns nil' do
-
1
expect(subject).not_to receive(:log_provenance_migrate)
-
1
expect(subject).to receive(:yaml_header_populate).with( "I'm going out", indent: "", target_filename: "filename" )
-
1
expect(subject).to receive(:yaml_header).with( "I'm going out",
-
indent: " ",
-
curation_concern: "concern",
-
header_type: ':works:' )
-
1
expect(subject).to receive(:yaml_body_works).with( "I'm going out", indent: " ", curation_concern: "concern" )
-
1
expect(subject).to receive(:yaml_body_files).with( "I'm going out",
-
indent_base: " ",
-
indent: " ",
-
curation_concern: "concern",
-
target_dirname: "dirname" )
-
-
1
expect(subject.yaml_populate_work(curation_concern: "concern", out: "I'm going out", target_filename: "filename",
-
target_dirname: "dirname")).to be_blank
-
end
-
end
-
end
-
-
1
context "when out argument is nil" do
-
1
skip "Add tests"
-
end
-
end
-
-
1
describe "#yaml_targetdir" do
-
1
context "when called with an object that is not a Pathname" do
-
1
it "creates a new Pathname and returns text" do
-
1
skip "Add a test"
-
end
-
end
-
-
1
context "when called with a Pathname" do
-
1
it "returns text" do
-
1
skip "Add a test"
-
end
-
end
-
end
-
-
1
describe "#yaml_targetdir_collection" do
-
1
before {
-
1
allow(subject).to receive(:yaml_targetdir).with(pathname_dir: "pathname dir", id: "collection id", prefix: "c_", task: "populate")
-
.and_return "population"
-
}
-
1
it "calls yaml_targetdir" do
-
1
expect(subject.yaml_targetdir_collection pathname_dir: "pathname dir", collection: OpenStruct.new(id: 'collection id')).to eq "population"
-
end
-
end
-
-
1
describe "#yaml_targetdir_users" do
-
1
before {
-
1
allow(subject).to receive(:yaml_targetdir).with(pathname_dir: "pathname dir", id: "", prefix: "users", task: "populate")
-
.and_return "population"
-
}
-
1
it "calls yaml_targetdir" do
-
1
expect(subject.yaml_targetdir_users pathname_dir: "pathname dir").to eq "population"
-
end
-
end
-
-
1
describe "#yaml_targetdir_work" do
-
1
before {
-
1
allow(subject).to receive(:yaml_targetdir).with(pathname_dir: "pathname dir", id: "work id", prefix: "w_", task: "populate")
-
.and_return "yaml targetdir"
-
}
-
1
it "calls yaml_targetdir" do
-
1
expect(subject.yaml_targetdir_work pathname_dir: "pathname dir", work: OpenStruct.new(id: 'work id')).to eq "yaml targetdir"
-
end
-
end
-
-
1
describe "#yaml_user_email" do
-
1
it "returns string" do
-
1
expect(subject.yaml_user_email OpenStruct.new(email: "bpotter@example.com")).to eq "user_bpotter@example.com"
-
end
-
end
-
-
1
describe "#yaml_work_export_files" do
-
1
exception = StandardError.new("error message")
-
1
exception.set_backtrace("backtrace")
-
-
1
context "when error occurs" do
-
1
before {
-
1
allow(subject).to receive(:open).with("dirname", "w").and_raise( exception )
-
1
allow(subject).to receive(:puts).with "StandardError: error message at backtrace"
-
}
-
1
it "catch error" do
-
1
expect(subject).to receive(:open).with("dirname", "w").and_raise( exception )
-
1
expect(subject).to receive(:puts).with "StandardError: error message at backtrace"
-
-
1
subject.yaml_work_export_files(work: "work", target_dirname: ["dirname"], log_filename: nil)
-
end
-
end
-
-
1
context "when error does not occur" do
-
1
skip "Add tests"
-
end
-
end
-
-
1
describe "#yaml_work_find" do
-
1
context "when source is 'DBDv2'" do
-
1
before {
-
1
allow(DataSet).to receive(:find).and_return "DataSet find"
-
}
-
1
it "calls DataSet.find" do
-
1
expect(subject.yaml_work_find curation_concern: "concern").to eq "DataSet find"
-
end
-
end
-
-
1
context "when source is not 'DBDv2'" do
-
1
before {
-
1
allow(subject).to receive(:source).and_return "DBDv1"
-
1
allow(GenericWork).to receive(:find).and_return "GenericWork find"
-
}
-
1
it "calls GenericWork.find" do
-
1
expect(subject.yaml_work_find curation_concern: "concern").to eq "GenericWork find"
-
end
-
end
-
end
-
-
1
pending "#self.init_attribute_names_always_include_cc"
-
end
-
1
# Expose FactoryBot's create/build/etc. without the FactoryBot. prefix.
RSpec.configure do |config|
  config.include FactoryBot::Syntax::Methods
end
-
1
module Hyrax
  # Shared helpers for building mock file models in factories/specs.
  module FactoryHelpers
    module_function

    # Default value for every stubbed metadata field; each may be overridden
    # via the opts hash (unknown keys in opts are ignored, matching the
    # original per-key opts.fetch behavior).
    MOCK_FILE_DEFAULTS = {
      mime_type: 'text/plain',
      content: 'content',
      file_size: [],
      format_label: [],
      height: [],
      width: [],
      filename: [],
      well_formed: [],
      page_count: [],
      file_title: [],
      last_modified: [],
      original_checksum: [],
      digest: [],
      duration: [],
      sample_rate: []
    }.freeze

    # Build a 'MockOriginal' mock model whose stubs are the defaults above,
    # selectively overridden by opts.
    def mock_file_factory(opts = {})
      stubs = MOCK_FILE_DEFAULTS.each_with_object({}) do |(key, default), acc|
        acc[key] = opts.fetch(key, default)
      end
      mock_model('MockOriginal', stubs)
    end
  end
end
-
1
# View spec for the signed-out user utility links: the login link label
# depends on the configured authentication method.
RSpec.describe '/_user_util_links.html.erb', type: :view do

  before do
    # Render as an anonymous visitor.
    allow(view).to receive(:user_signed_in?).and_return false
    allow(view).to receive(:current_user).and_return nil
  end

  it 'IU Login should go to CAS' do
    Rails.configuration.authentication_method = "iu"
    render
    expect(rendered).to have_link "IU Login"
  end

  it 'Other login should go to regular login' do
    Rails.configuration.authentication_method = "umich"
    render
    expect(rendered).to have_link 'Login'
  end

end